diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index d4777e32a9bdf..2fa5c7dcfa9de 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.22.6" + default: "1.22.8" runs: using: "composite" steps: diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index 12ee87f5a5c9f..c52f1138e03ca 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2 with: - terraform_version: 1.9.2 + terraform_version: 1.9.8 terraform_wrapper: false diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fa5164b91caa4..e6d105d8890f4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -197,7 +197,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@6802cc60d4e7f78b9d5454f6cf3935c042d5e1e3 # v1.26.0 + uses: crate-ci/typos@0d9e0c2c1bd7f770f6eb90f87780848ca02fc12c # v1.26.8 with: config: .github/workflows/typos.toml @@ -233,8 +233,7 @@ jobs: gen: timeout-minutes: 8 runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} - needs: changes - if: needs.changes.outputs.docs-only == 'false' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' + if: always() steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index 8053b12780855..c0a3e87c5fe98 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -22,10 +22,6 @@ on: permissions: contents: read - # Necessary to push docker images to ghcr.io. 
- packages: write - # Necessary for depot.dev authentication. - id-token: write # Avoid running multiple jobs for the same commit. concurrency: @@ -33,6 +29,11 @@ concurrency: jobs: build: + permissions: + # Necessary for depot.dev authentication. + id-token: write + # Necessary to push docker images to ghcr.io. + packages: write runs-on: ubuntu-latest if: github.repository_owner == 'coder' steps: diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index 99ce3f62618a7..2b2887a13934e 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -6,6 +6,10 @@ on: # Every day at midnight - cron: "0 0 * * *" workflow_dispatch: + +permissions: + contents: read + jobs: go-race: # While GitHub's toaster runners are likelier to flake, we want consistency diff --git a/.github/workflows/pr-cleanup.yaml b/.github/workflows/pr-cleanup.yaml index ebcf097c0ef6b..f5cee03a4c6c4 100644 --- a/.github/workflows/pr-cleanup.yaml +++ b/.github/workflows/pr-cleanup.yaml @@ -8,12 +8,12 @@ on: description: "PR number" required: true -permissions: - packages: write - jobs: cleanup: runs-on: "ubuntu-latest" + permissions: + # Necessary to delete docker images from ghcr.io. 
+ packages: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index e86ad1f3dd351..49e73e9b0bf63 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -30,8 +30,6 @@ env: permissions: contents: read - packages: write - pull-requests: write # needed for commenting on PRs jobs: check_pr: @@ -171,6 +169,8 @@ jobs: needs: get_info if: needs.get_info.outputs.BUILD == 'true' || github.event.inputs.deploy == 'true' runs-on: "ubuntu-latest" + permissions: + pull-requests: write # needed for commenting on PRs steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -205,6 +205,9 @@ jobs: # Run build job only if there are changes in the files that we care about or if the workflow is manually triggered with --build flag if: needs.get_info.outputs.BUILD == 'true' runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + permissions: + # Necessary to push docker images to ghcr.io. + packages: write # This concurrency only cancels build jobs if a new build is triggred. It will avoid cancelling the current deployemtn in case of docs chnages. 
concurrency: group: build-${{ github.workflow }}-${{ github.ref }}-${{ needs.get_info.outputs.BUILD }} @@ -425,7 +428,7 @@ jobs: --first-user-username coder \ --first-user-email pr${{ env.PR_NUMBER }}@coder.com \ --first-user-password $password \ - --first-user-trial \ + --first-user-trial=false \ --use-token-as-session \ https://${{ env.PR_HOSTNAME }} diff --git a/.github/workflows/release-validation.yaml b/.github/workflows/release-validation.yaml index 405e051f78526..2f12ac2bb5e7b 100644 --- a/.github/workflows/release-validation.yaml +++ b/.github/workflows/release-validation.yaml @@ -5,6 +5,9 @@ on: tags: - "v*" +permissions: + contents: read + jobs: network-performance: runs-on: ubuntu-latest diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b2757b25181d5..74b5b7b35a1e7 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -18,12 +18,7 @@ on: default: false permissions: - # Required to publish a release - contents: write - # Necessary to push docker images to ghcr.io. - packages: write - # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) - id-token: write + contents: read concurrency: ${{ github.workflow }}-${{ github.ref }} @@ -40,6 +35,13 @@ jobs: release: name: Build and publish runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + permissions: + # Required to publish a release + contents: write + # Necessary to push docker images to ghcr.io. 
+ packages: write + # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) + id-token: write env: # Necessary for Docker manifest DOCKER_CLI_EXPERIMENTAL: "enabled" diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 5913c0349e99a..77a8d36a6a6f3 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index 5ae6de7b2fe7d..4ae50b2aa4792 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -3,7 +3,6 @@ name: "security" permissions: actions: read contents: read - security-events: write on: workflow_dispatch: @@ -23,6 +22,8 @@ concurrency: jobs: codeql: + permissions: + security-events: write runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner @@ -37,7 +38,7 @@ jobs: uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/init@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: languages: go, javascript @@ -47,7 +48,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/analyze@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 - name: Send Slack notification on failure if: ${{ failure() }} @@ -61,6 +62,8 @@ jobs: "${{ secrets.SLACK_SECURITY_FAILURE_WEBHOOK_URL }}" trivy: + permissions: + security-events: write runs-on: ${{ 
github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner @@ -95,13 +98,20 @@ jobs: # protoc must be in lockstep with our dogfood Dockerfile or the # version in the comments will differ. This is also defined in # ci.yaml. - set -x + set -euxo pipefail cd dogfood/contents + mkdir -p /usr/local/bin + mkdir -p /usr/local/include + DOCKER_BUILDKIT=1 docker build . --target proto -t protoc protoc_path=/usr/local/bin/protoc docker run --rm --entrypoint cat protoc /tmp/bin/protoc > $protoc_path chmod +x $protoc_path protoc --version + # Copy the generated files to the include directory. + docker run --rm -v /usr/local/include:/target protoc cp -r /tmp/include/google /target/ + ls -la /usr/local/include/google/protobuf/ + stat /usr/local/include/google/protobuf/timestamp.proto - name: Build Coder linux amd64 Docker image id: build @@ -132,7 +142,7 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: sarif_file: trivy-results.sarif category: "Trivy" diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index a05632d181ed3..d055c4f451e4e 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -1,16 +1,21 @@ -name: Stale Issue, Banch and Old Workflows Cleanup +name: Stale Issue, Branch and Old Workflows Cleanup on: schedule: # Every day at midnight - cron: "0 0 * * *" workflow_dispatch: + +permissions: + contents: read + jobs: issues: runs-on: ubuntu-latest permissions: + # Needed to close issues. issues: write + # Needed to close PRs. 
pull-requests: write - actions: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -86,6 +91,9 @@ jobs: branches: runs-on: ubuntu-latest + permissions: + # Needed to delete branches. + contents: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -105,6 +113,9 @@ jobs: exclude_open_pr_branches: true del_runs: runs-on: ubuntu-latest + permissions: + # Needed to delete workflow runs. + actions: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 diff --git a/.vscode/settings.json b/.vscode/settings.json index 2476e330cd306..6695a12faa8dc 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -175,6 +175,7 @@ "unauthenticate", "unconvert", "untar", + "userauth", "userspace", "VMID", "walkthrough", diff --git a/cli/login.go b/cli/login.go index 484de69fdf1b5..3bb4f0796e4a5 100644 --- a/cli/login.go +++ b/cli/login.go @@ -267,12 +267,59 @@ func (r *RootCmd) login() *serpent.Command { trial = v == "yes" || v == "y" } + var trialInfo codersdk.CreateFirstUserTrialInfo + if trial { + if trialInfo.FirstName == "" { + trialInfo.FirstName, err = promptTrialInfo(inv, "firstName") + if err != nil { + return err + } + } + if trialInfo.LastName == "" { + trialInfo.LastName, err = promptTrialInfo(inv, "lastName") + if err != nil { + return err + } + } + if trialInfo.PhoneNumber == "" { + trialInfo.PhoneNumber, err = promptTrialInfo(inv, "phoneNumber") + if err != nil { + return err + } + } + if trialInfo.JobTitle == "" { + trialInfo.JobTitle, err = promptTrialInfo(inv, "jobTitle") + if err != nil { + return err + } + } + if trialInfo.CompanyName == "" { + trialInfo.CompanyName, err = promptTrialInfo(inv, "companyName") + if err != nil { + return err + } + } + if trialInfo.Country == "" { + trialInfo.Country, err = promptCountry(inv) + if err != nil { + return err 
+ } + } + if trialInfo.Developers == "" { + trialInfo.Developers, err = promptDevelopers(inv) + if err != nil { + return err + } + } + } + _, err = client.CreateFirstUser(ctx, codersdk.CreateFirstUserRequest{ - Email: email, - Username: username, - Name: name, - Password: password, - Trial: trial, + Email: email, + Username: username, + Name: name, + Password: password, + Trial: trial, + TrialInfo: trialInfo, }) if err != nil { return xerrors.Errorf("create initial user: %w", err) @@ -449,3 +496,75 @@ func openURL(inv *serpent.Invocation, urlToOpen string) error { return browser.OpenURL(urlToOpen) } + +func promptTrialInfo(inv *serpent.Invocation, fieldName string) (string, error) { + value, err := cliui.Prompt(inv, cliui.PromptOptions{ + Text: fmt.Sprintf("Please enter %s:", pretty.Sprint(cliui.DefaultStyles.Field, fieldName)), + Validate: func(s string) error { + if strings.TrimSpace(s) == "" { + return xerrors.Errorf("%s is required", fieldName) + } + return nil + }, + }) + if err != nil { + if errors.Is(err, cliui.Canceled) { + return "", nil + } + return "", err + } + return value, nil +} + +func promptDevelopers(inv *serpent.Invocation) (string, error) { + options := []string{"1-100", "101-500", "501-1000", "1001-2500", "2500+"} + selection, err := cliui.Select(inv, cliui.SelectOptions{ + Options: options, + HideSearch: false, + Message: "Select the number of developers:", + }) + if err != nil { + return "", xerrors.Errorf("select developers: %w", err) + } + return selection, nil +} + +func promptCountry(inv *serpent.Invocation) (string, error) { + countries := []string{ + "Afghanistan", "Åland Islands", "Albania", "Algeria", "American Samoa", "Andorra", "Angola", "Anguilla", "Antarctica", "Antigua and Barbuda", + "Argentina", "Armenia", "Aruba", "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", + "Belarus", "Belgium", "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia, Plurinational State of", "Bonaire, Sint Eustatius and 
Saba", "Bosnia and Herzegovina", "Botswana", + "Bouvet Island", "Brazil", "British Indian Ocean Territory", "Brunei Darussalam", "Bulgaria", "Burkina Faso", "Burundi", "Cambodia", "Cameroon", "Canada", + "Cape Verde", "Cayman Islands", "Central African Republic", "Chad", "Chile", "China", "Christmas Island", "Cocos (Keeling) Islands", "Colombia", "Comoros", + "Congo", "Congo, the Democratic Republic of the", "Cook Islands", "Costa Rica", "Côte d'Ivoire", "Croatia", "Cuba", "Curaçao", "Cyprus", "Czech Republic", + "Denmark", "Djibouti", "Dominica", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea", "Estonia", + "Ethiopia", "Falkland Islands (Malvinas)", "Faroe Islands", "Fiji", "Finland", "France", "French Guiana", "French Polynesia", "French Southern Territories", "Gabon", + "Gambia", "Georgia", "Germany", "Ghana", "Gibraltar", "Greece", "Greenland", "Grenada", "Guadeloupe", "Guam", + "Guatemala", "Guernsey", "Guinea", "Guinea-Bissau", "Guyana", "Haiti", "Heard Island and McDonald Islands", "Holy See (Vatican City State)", "Honduras", "Hong Kong", + "Hungary", "Iceland", "India", "Indonesia", "Iran, Islamic Republic of", "Iraq", "Ireland", "Isle of Man", "Israel", "Italy", + "Jamaica", "Japan", "Jersey", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Korea, Democratic People's Republic of", "Korea, Republic of", "Kuwait", + "Kyrgyzstan", "Lao People's Democratic Republic", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libya", "Liechtenstein", "Lithuania", "Luxembourg", + "Macao", "Macedonia, the Former Yugoslav Republic of", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Martinique", + "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia, Federated States of", "Moldova, Republic of", "Monaco", "Mongolia", "Montenegro", "Montserrat", + "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru", "Nepal", "Netherlands", "New Caledonia", "New Zealand", "Nicaragua", + "Niger", "Nigeria", 
"Niue", "Norfolk Island", "Northern Mariana Islands", "Norway", "Oman", "Pakistan", "Palau", "Palestine, State of", + "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Pitcairn", "Poland", "Portugal", "Puerto Rico", "Qatar", + "Réunion", "Romania", "Russian Federation", "Rwanda", "Saint Barthélemy", "Saint Helena, Ascension and Tristan da Cunha", "Saint Kitts and Nevis", "Saint Lucia", "Saint Martin (French part)", "Saint Pierre and Miquelon", + "Saint Vincent and the Grenadines", "Samoa", "San Marino", "Sao Tome and Principe", "Saudi Arabia", "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", + "Sint Maarten (Dutch part)", "Slovakia", "Slovenia", "Solomon Islands", "Somalia", "South Africa", "South Georgia and the South Sandwich Islands", "South Sudan", "Spain", "Sri Lanka", + "Sudan", "Suriname", "Svalbard and Jan Mayen", "Swaziland", "Sweden", "Switzerland", "Syrian Arab Republic", "Taiwan, Province of China", "Tajikistan", "Tanzania, United Republic of", + "Thailand", "Timor-Leste", "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey", "Turkmenistan", "Turks and Caicos Islands", + "Tuvalu", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States", "United States Minor Outlying Islands", "Uruguay", "Uzbekistan", "Vanuatu", + "Venezuela, Bolivarian Republic of", "Vietnam", "Virgin Islands, British", "Virgin Islands, U.S.", "Wallis and Futuna", "Western Sahara", "Yemen", "Zambia", "Zimbabwe", + } + + selection, err := cliui.Select(inv, cliui.SelectOptions{ + Options: countries, + Message: "Select the country:", + HideSearch: false, + }) + if err != nil { + return "", xerrors.Errorf("select country: %w", err) + } + return selection, nil +} diff --git a/cli/login_test.go b/cli/login_test.go index 0428c332d02b0..9a86e7caad351 100644 --- a/cli/login_test.go +++ b/cli/login_test.go @@ -96,6 +96,58 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", 
coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. + } + for i := 0; i < len(matches); i += 2 { + match := matches[i] + value := matches[i+1] + pty.ExpectMatch(match) + pty.WriteLine(value) + } + pty.ExpectMatch("Welcome to Coder") + <-doneChan + ctx := testutil.Context(t, testutil.WaitShort) + resp, err := client.LoginWithPassword(ctx, codersdk.LoginWithPasswordRequest{ + Email: coderdtest.FirstUserParams.Email, + Password: coderdtest.FirstUserParams.Password, + }) + require.NoError(t, err) + client.SetSessionToken(resp.SessionToken) + me, err := client.User(ctx, codersdk.Me) + require.NoError(t, err) + assert.Equal(t, coderdtest.FirstUserParams.Username, me.Username) + assert.Equal(t, coderdtest.FirstUserParams.Name, me.Name) + assert.Equal(t, coderdtest.FirstUserParams.Email, me.Email) + }) + + t.Run("InitialUserTTYWithNoTrial", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + // The --force-tty flag is required on Windows, because the `isatty` library does not + // accurately detect Windows ptys when they are not attached to a process: + // https://github.com/mattn/go-isatty/issues/59 + doneChan := make(chan struct{}) + root, _ := clitest.New(t, "login", "--force-tty", client.URL.String()) + pty := ptytest.New(t).Attach(root) + go func() { + defer close(doneChan) + err := root.Run() + assert.NoError(t, err) + }() + + matches := []string{ + "first user?", "yes", + "username", coderdtest.FirstUserParams.Username, + "name", coderdtest.FirstUserParams.Name, + "email", coderdtest.FirstUserParams.Email, + "password", coderdtest.FirstUserParams.Password, + "password", 
coderdtest.FirstUserParams.Password, // confirm + "trial", "no", } for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -142,6 +194,12 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. } for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -185,6 +243,12 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. } for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -220,6 +284,17 @@ func TestLogin(t *testing.T) { ) pty := ptytest.New(t).Attach(inv) w := clitest.StartWithWaiter(t, inv) + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. 
pty.ExpectMatch("Welcome to Coder") w.RequireSuccess() ctx := testutil.Context(t, testutil.WaitShort) @@ -248,6 +323,17 @@ func TestLogin(t *testing.T) { ) pty := ptytest.New(t).Attach(inv) w := clitest.StartWithWaiter(t, inv) + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. pty.ExpectMatch("Welcome to Coder") w.RequireSuccess() ctx := testutil.Context(t, testutil.WaitShort) @@ -299,12 +385,21 @@ func TestLogin(t *testing.T) { // Validate that we reprompt for matching passwords. pty.ExpectMatch("Passwords do not match") pty.ExpectMatch("Enter a " + pretty.Sprint(cliui.DefaultStyles.Field, "password")) - pty.WriteLine(coderdtest.FirstUserParams.Password) pty.ExpectMatch("Confirm") pty.WriteLine(coderdtest.FirstUserParams.Password) pty.ExpectMatch("trial") pty.WriteLine("yes") + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) pty.ExpectMatch("Welcome to Coder") <-doneChan }) diff --git a/cli/server.go b/cli/server.go index 5adb44c3c0a7d..d0282004a2aa1 100644 --- a/cli/server.go +++ b/cli/server.go @@ -10,7 +10,6 @@ import ( "crypto/tls" "crypto/x509" "database/sql" - "encoding/hex" "errors" "flag" "fmt" @@ -62,6 +61,7 @@ 
import ( "github.com/coder/serpent" "github.com/coder/wgtunnel/tunnelsdk" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/entitlements" "github.com/coder/coder/v2/coderd/notifications/reports" "github.com/coder/coder/v2/coderd/runtimeconfig" @@ -97,7 +97,6 @@ import ( "github.com/coder/coder/v2/coderd/updatecheck" "github.com/coder/coder/v2/coderd/util/slice" stringutil "github.com/coder/coder/v2/coderd/util/strings" - "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" @@ -718,7 +717,9 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. } if options.DeploymentValues.Prometheus.Enable && options.DeploymentValues.Prometheus.CollectDBMetrics { - options.Database = dbmetrics.New(options.Database, options.PrometheusRegistry) + options.Database = dbmetrics.NewQueryMetrics(options.Database, options.Logger, options.PrometheusRegistry) + } else { + options.Database = dbmetrics.NewDBMetrics(options.Database, options.Logger, options.PrometheusRegistry) } var deploymentID string @@ -741,90 +742,31 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. return xerrors.Errorf("set deployment id: %w", err) } } - - // Read the app signing key from the DB. We store it hex encoded - // since the config table uses strings for the value and we - // don't want to deal with automatic encoding issues. - appSecurityKeyStr, err := tx.GetAppSecurityKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return xerrors.Errorf("get app signing key: %w", err) - } - // If the string in the DB is an invalid hex string or the - // length is not equal to the current key length, generate a new - // one. - // - // If the key is regenerated, old signed tokens and encrypted - // strings will become invalid. 
New signed app tokens will be - // generated automatically on failure. Any workspace app token - // smuggling operations in progress may fail, although with a - // helpful error. - if decoded, err := hex.DecodeString(appSecurityKeyStr); err != nil || len(decoded) != len(workspaceapps.SecurityKey{}) { - b := make([]byte, len(workspaceapps.SecurityKey{})) - _, err := rand.Read(b) - if err != nil { - return xerrors.Errorf("generate fresh app signing key: %w", err) - } - - appSecurityKeyStr = hex.EncodeToString(b) - err = tx.UpsertAppSecurityKey(ctx, appSecurityKeyStr) - if err != nil { - return xerrors.Errorf("insert freshly generated app signing key to database: %w", err) - } - } - - appSecurityKey, err := workspaceapps.KeyFromString(appSecurityKeyStr) - if err != nil { - return xerrors.Errorf("decode app signing key from database: %w", err) - } - - options.AppSecurityKey = appSecurityKey - - // Read the oauth signing key from the database. Like the app security, generate a new one - // if it is invalid for any reason. 
- oauthSigningKeyStr, err := tx.GetOAuthSigningKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return xerrors.Errorf("get app oauth signing key: %w", err) - } - if decoded, err := hex.DecodeString(oauthSigningKeyStr); err != nil || len(decoded) != len(options.OAuthSigningKey) { - b := make([]byte, len(options.OAuthSigningKey)) - _, err := rand.Read(b) - if err != nil { - return xerrors.Errorf("generate fresh oauth signing key: %w", err) - } - - oauthSigningKeyStr = hex.EncodeToString(b) - err = tx.UpsertOAuthSigningKey(ctx, oauthSigningKeyStr) - if err != nil { - return xerrors.Errorf("insert freshly generated oauth signing key to database: %w", err) - } - } - - oauthKeyBytes, err := hex.DecodeString(oauthSigningKeyStr) - if err != nil { - return xerrors.Errorf("decode oauth signing key from database: %w", err) - } - if len(oauthKeyBytes) != len(options.OAuthSigningKey) { - return xerrors.Errorf("oauth signing key in database is not the correct length, expect %d got %d", len(options.OAuthSigningKey), len(oauthKeyBytes)) - } - copy(options.OAuthSigningKey[:], oauthKeyBytes) - if options.OAuthSigningKey == [32]byte{} { - return xerrors.Errorf("oauth signing key in database is empty") - } - - // Read the coordinator resume token signing key from the - // database. 
- resumeTokenKey, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, tx) - if err != nil { - return xerrors.Errorf("get coordinator resume token key from database: %w", err) - } - options.CoordinatorResumeTokenProvider = tailnet.NewResumeTokenKeyProvider(resumeTokenKey, quartz.NewReal(), tailnet.DefaultResumeTokenExpiry) - return nil }, nil) if err != nil { - return err + return xerrors.Errorf("set deployment id: %w", err) + } + + fetcher := &cryptokeys.DBFetcher{ + DB: options.Database, + } + + resumeKeycache, err := cryptokeys.NewSigningCache(ctx, + logger, + fetcher, + codersdk.CryptoKeyFeatureTailnetResume, + ) + if err != nil { + logger.Critical(ctx, "failed to properly instantiate tailnet resume signing cache", slog.Error(err)) } + options.CoordinatorResumeTokenProvider = tailnet.NewResumeTokenKeyProvider( + resumeKeycache, + quartz.NewReal(), + tailnet.DefaultResumeTokenExpiry, + ) + options.RuntimeConfig = runtimeconfig.NewManager() // This should be output before the logs start streaming. @@ -1093,7 +1035,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. 
autobuildTicker := time.NewTicker(vals.AutobuildPollInterval.Value()) defer autobuildTicker.Stop() autobuildExecutor := autobuild.NewExecutor( - ctx, options.Database, options.Pubsub, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) + ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) autobuildExecutor.Run() hangDetectorTicker := time.NewTicker(vals.JobHangDetectorInterval.Value()) diff --git a/cli/templatepush.go b/cli/templatepush.go index f5ff1dcb3cf85..22a77791c5f77 100644 --- a/cli/templatepush.go +++ b/cli/templatepush.go @@ -282,7 +282,7 @@ func (pf *templateUploadFlags) stdin(inv *serpent.Invocation) (out bool) { } }() // We let the directory override our isTTY check - return pf.directory == "-" || (!isTTYIn(inv) && pf.directory == "") + return pf.directory == "-" || (!isTTYIn(inv) && pf.directory == ".") } func (pf *templateUploadFlags) upload(inv *serpent.Invocation, client *codersdk.Client) (*codersdk.UploadResponse, error) { diff --git a/cli/testdata/coder_server_--help.golden b/cli/testdata/coder_server_--help.golden index 1387e31710e88..cd647d0537a93 100644 --- a/cli/testdata/coder_server_--help.golden +++ b/cli/testdata/coder_server_--help.golden @@ -106,6 +106,58 @@ Use a YAML configuration file when your server launch become unwieldy. Write out the current server config as YAML to stdout. +EMAIL OPTIONS: +Configure how emails are sent. + + --email-force-tls bool, $CODER_EMAIL_FORCE_TLS (default: false) + Force a TLS connection to the configured SMTP smarthost. + + --email-from string, $CODER_EMAIL_FROM + The sender's address to use. + + --email-hello string, $CODER_EMAIL_HELLO (default: localhost) + The hostname identifying the SMTP server. 
+ + --email-smarthost host:port, $CODER_EMAIL_SMARTHOST (default: localhost:587) + The intermediary SMTP host through which emails are sent. + +EMAIL / EMAIL AUTHENTICATION OPTIONS: +Configure SMTP authentication options. + + --email-auth-identity string, $CODER_EMAIL_AUTH_IDENTITY + Identity to use with PLAIN authentication. + + --email-auth-password string, $CODER_EMAIL_AUTH_PASSWORD + Password to use with PLAIN/LOGIN authentication. + + --email-auth-password-file string, $CODER_EMAIL_AUTH_PASSWORD_FILE + File from which to load password for use with PLAIN/LOGIN + authentication. + + --email-auth-username string, $CODER_EMAIL_AUTH_USERNAME + Username to use with PLAIN/LOGIN authentication. + +EMAIL / EMAIL TLS OPTIONS: +Configure TLS for your SMTP server target. + + --email-tls-ca-cert-file string, $CODER_EMAIL_TLS_CACERTFILE + CA certificate file to use. + + --email-tls-cert-file string, $CODER_EMAIL_TLS_CERTFILE + Certificate file to use. + + --email-tls-cert-key-file string, $CODER_EMAIL_TLS_CERTKEYFILE + Certificate key file to use. + + --email-tls-server-name string, $CODER_EMAIL_TLS_SERVERNAME + Server name to verify against the target certificate. + + --email-tls-skip-verify bool, $CODER_EMAIL_TLS_SKIPVERIFY + Skip verification of the target server's certificate (insecure). + + --email-tls-starttls bool, $CODER_EMAIL_TLS_STARTTLS + Enable STARTTLS to upgrade insecure SMTP connections using TLS. + INTROSPECTION / HEALTH CHECK OPTIONS: --health-check-refresh duration, $CODER_HEALTH_CHECK_REFRESH (default: 10m0s) Refresh interval for healthchecks. @@ -145,7 +197,9 @@ INTROSPECTION / PROMETHEUS OPTIONS: Collect agent stats (may increase charges for metrics storage). --prometheus-collect-db-metrics bool, $CODER_PROMETHEUS_COLLECT_DB_METRICS (default: false) - Collect database metrics (may increase charges for metrics storage). + Collect database query metrics (may increase charges for metrics + storage). 
If set to false, a reduced set of database metrics are still + collected. --prometheus-enable bool, $CODER_PROMETHEUS_ENABLE Serve prometheus metrics on the address defined by prometheus address. @@ -347,54 +401,68 @@ Configure how notifications are processed and delivered. NOTIFICATIONS / EMAIL OPTIONS: Configure how email notifications are sent. - --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS (default: false) + --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS Force a TLS connection to the configured SMTP smarthost. + DEPRECATED: Use --email-force-tls instead. --notifications-email-from string, $CODER_NOTIFICATIONS_EMAIL_FROM The sender's address to use. + DEPRECATED: Use --email-from instead. - --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO (default: localhost) + --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO The hostname identifying the SMTP server. + DEPRECATED: Use --email-hello instead. - --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST (default: localhost:587) + --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST The intermediary SMTP host through which emails are sent. + DEPRECATED: Use --email-smarthost instead. NOTIFICATIONS / EMAIL / EMAIL AUTHENTICATION OPTIONS: Configure SMTP authentication options. --notifications-email-auth-identity string, $CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY Identity to use with PLAIN authentication. + DEPRECATED: Use --email-auth-identity instead. --notifications-email-auth-password string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD Password to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password instead. --notifications-email-auth-password-file string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE File from which to load password for use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password-file instead. 
--notifications-email-auth-username string, $CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME Username to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-username instead. NOTIFICATIONS / EMAIL / EMAIL TLS OPTIONS: Configure TLS for your SMTP server target. --notifications-email-tls-ca-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CACERTFILE CA certificate file to use. + DEPRECATED: Use --email-tls-ca-cert-file instead. --notifications-email-tls-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE Certificate file to use. + DEPRECATED: Use --email-tls-cert-file instead. --notifications-email-tls-cert-key-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE Certificate key file to use. + DEPRECATED: Use --email-tls-cert-key-file instead. --notifications-email-tls-server-name string, $CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME Server name to verify against the target certificate. + DEPRECATED: Use --email-tls-server-name instead. --notifications-email-tls-skip-verify bool, $CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY Skip verification of the target server's certificate (insecure). + DEPRECATED: Use --email-tls-skip-verify instead. --notifications-email-tls-starttls bool, $CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS Enable STARTTLS to upgrade insecure SMTP connections using TLS. + DEPRECATED: Use --email-tls-starttls instead. NOTIFICATIONS / WEBHOOK OPTIONS: --notifications-webhook-endpoint url, $CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index 78c893c58ae16..38b2b68c24de1 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -197,7 +197,8 @@ introspection: - template_name - username - workspace_name - # Collect database metrics (may increase charges for metrics storage). + # Collect database query metrics (may increase charges for metrics storage). If + # set to false, a reduced set of database metrics are still collected. 
# (default: false, type: bool) collect_db_metrics: false pprof: @@ -517,6 +518,51 @@ userQuietHoursSchedule: # compatibility reasons, this will be removed in a future release. # (default: false, type: bool) allowWorkspaceRenames: false +# Configure how emails are sent. +email: + # The sender's address to use. + # (default: , type: string) + from: "" + # The intermediary SMTP host through which emails are sent. + # (default: localhost:587, type: host:port) + smarthost: localhost:587 + # The hostname identifying the SMTP server. + # (default: localhost, type: string) + hello: localhost + # Force a TLS connection to the configured SMTP smarthost. + # (default: false, type: bool) + forceTLS: false + # Configure SMTP authentication options. + emailAuth: + # Identity to use with PLAIN authentication. + # (default: , type: string) + identity: "" + # Username to use with PLAIN/LOGIN authentication. + # (default: , type: string) + username: "" + # File from which to load password for use with PLAIN/LOGIN authentication. + # (default: , type: string) + passwordFile: "" + # Configure TLS for your SMTP server target. + emailTLS: + # Enable STARTTLS to upgrade insecure SMTP connections using TLS. + # (default: , type: bool) + startTLS: false + # Server name to verify against the target certificate. + # (default: , type: string) + serverName: "" + # Skip verification of the target server's certificate (insecure). + # (default: , type: bool) + insecureSkipVerify: false + # CA certificate file to use. + # (default: , type: string) + caCertFile: "" + # Certificate file to use. + # (default: , type: string) + certFile: "" + # Certificate key file to use. + # (default: , type: string) + certKeyFile: "" # Configure how notifications are processed and delivered. notifications: # Which delivery method to use (available options: 'smtp', 'webhook'). @@ -531,13 +577,13 @@ notifications: # (default: , type: string) from: "" # The intermediary SMTP host through which emails are sent. 
- # (default: localhost:587, type: host:port) + # (default: , type: host:port) smarthost: localhost:587 # The hostname identifying the SMTP server. - # (default: localhost, type: string) + # (default: , type: string) hello: localhost # Force a TLS connection to the configured SMTP smarthost. - # (default: false, type: bool) + # (default: , type: bool) forceTLS: false # Configure SMTP authentication options. emailAuth: diff --git a/coderd/agentapi/stats_test.go b/coderd/agentapi/stats_test.go index d2c8e4f163df5..83edb8cccc4e1 100644 --- a/coderd/agentapi/stats_test.go +++ b/coderd/agentapi/stats_test.go @@ -70,6 +70,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -109,6 +114,7 @@ func TestUpdateStates(t *testing.T) { Database: dbM, Pubsub: ps, StatsBatcher: batcher, + UsageTracker: wut, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { updateAgentMetricsFnCalled = true @@ -126,10 +132,14 @@ func TestUpdateStates(t *testing.T) { return now }, } + defer wut.Close() // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) + // User gets fetched to hit the UpdateAgentMetricsFn. + dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) + // We expect an activity bump because ConnectionCount > 0. dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ WorkspaceID: workspace.ID, @@ -137,14 +147,11 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. 
- dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, LastUsedAt: now, }).Return(nil) - // User gets fetched to hit the UpdateAgentMetricsFn. - dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) - // Ensure that pubsub notifications are sent. notifyDescription := make(chan []byte) ps.Subscribe(codersdk.WorkspaceNotifyChannel(workspace.ID), func(_ context.Context, description []byte) { @@ -159,6 +166,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(10 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + batcher.Mu.Lock() defer batcher.Mu.Unlock() require.Equal(t, int64(1), batcher.Called) @@ -211,6 +222,7 @@ func TestUpdateStates(t *testing.T) { StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, + UsageTracker: workspacestats.NewTracker(dbM), StatsBatcher: batcher, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), // Ignored when nil. @@ -225,12 +237,6 @@ func TestUpdateStates(t *testing.T) { // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // Workspace last used at gets bumped. 
- dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }).Return(nil) - _, err := api.UpdateStats(context.Background(), req) require.NoError(t, err) }) @@ -306,6 +312,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -325,6 +336,7 @@ func TestUpdateStates(t *testing.T) { StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, + UsageTracker: wut, StatsBatcher: batcher, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { @@ -343,6 +355,7 @@ func TestUpdateStates(t *testing.T) { return now }, } + defer wut.Close() // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) @@ -355,9 +368,9 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. - dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, + LastUsedAt: now.UTC(), }).Return(nil) // User gets fetched to hit the UpdateAgentMetricsFn. 
@@ -369,6 +382,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(15 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + require.True(t, updateAgentMetricsFnCalled) }) @@ -392,6 +409,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -422,6 +444,7 @@ func TestUpdateStates(t *testing.T) { }, } ) + defer wut.Close() api := agentapi.StatsAPI{ AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil @@ -431,6 +454,7 @@ func TestUpdateStates(t *testing.T) { Database: dbM, Pubsub: ps, StatsBatcher: batcher, + UsageTracker: wut, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { updateAgentMetricsFnCalled = true @@ -462,8 +486,8 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. 
- dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, LastUsedAt: now, }).Return(nil) @@ -484,6 +508,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(10 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + batcher.Mu.Lock() defer batcher.Mu.Unlock() require.EqualValues(t, 1, batcher.Called) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 76084b1ff54dd..83d1fdc2c492a 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -3623,11 +3623,28 @@ const docTemplate = `{ } } }, + "/scim/v2/ServiceProviderConfig": { + "get": { + "produces": [ + "application/scim+json" + ], + "tags": [ + "Enterprise" + ], + "summary": "SCIM 2.0: Service Provider Config", + "operationId": "scim-get-service-provider-config", + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/scim/v2/Users": { "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3647,7 +3664,7 @@ const docTemplate = `{ "post": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3683,7 +3700,7 @@ const docTemplate = `{ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3713,7 +3730,7 @@ const docTemplate = `{ "patch": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -7646,6 +7663,15 @@ const docTemplate = `{ ], "summary": "Get workspace proxy crypto keys", "operationId": "get-workspace-proxy-crypto-keys", + "parameters": [ + { + "type": "string", + "description": "Feature key", + "name": "feature", + "in": "query", + "required": true + } + ], "responses": { "200": { "description": "OK", @@ -10011,12 +10037,14 @@ const 
docTemplate = `{ "codersdk.CryptoKeyFeature": { "type": "string", "enum": [ - "workspace_apps", + "workspace_apps_api_key", + "workspace_apps_token", "oidc_convert", "tailnet_resume" ], "x-enum-varnames": [ - "CryptoKeyFeatureWorkspaceApp", + "CryptoKeyFeatureWorkspaceAppsAPIKey", + "CryptoKeyFeatureWorkspaceAppsToken", "CryptoKeyFeatureOIDCConvert", "CryptoKeyFeatureTailnetResume" ] @@ -16244,9 +16272,6 @@ const docTemplate = `{ "wsproxysdk.RegisterWorkspaceProxyResponse": { "type": "object", "properties": { - "app_security_key": { - "type": "string" - }, "derp_force_websockets": { "type": "boolean" }, @@ -16281,6 +16306,11 @@ const docTemplate = `{ } }, "securityDefinitions": { + "Authorization": { + "type": "apiKey", + "name": "Authorizaiton", + "in": "header" + }, "CoderSessionToken": { "type": "apiKey", "name": "Coder-Session-Token", diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index beff69ca22373..9861e195b7a69 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -3189,11 +3189,24 @@ } } }, + "/scim/v2/ServiceProviderConfig": { + "get": { + "produces": ["application/scim+json"], + "tags": ["Enterprise"], + "summary": "SCIM 2.0: Service Provider Config", + "operationId": "scim-get-service-provider-config", + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/scim/v2/Users": { "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/scim+json"], @@ -3209,7 +3222,7 @@ "post": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/json"], @@ -3241,7 +3254,7 @@ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/scim+json"], @@ -3267,7 +3280,7 @@ "patch": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/scim+json"], @@ -6758,6 +6771,15 @@ "tags": ["Enterprise"], "summary": "Get workspace proxy crypto keys", 
"operationId": "get-workspace-proxy-crypto-keys", + "parameters": [ + { + "type": "string", + "description": "Feature key", + "name": "feature", + "in": "query", + "required": true + } + ], "responses": { "200": { "description": "OK", @@ -8914,9 +8936,15 @@ }, "codersdk.CryptoKeyFeature": { "type": "string", - "enum": ["workspace_apps", "oidc_convert", "tailnet_resume"], + "enum": [ + "workspace_apps_api_key", + "workspace_apps_token", + "oidc_convert", + "tailnet_resume" + ], "x-enum-varnames": [ - "CryptoKeyFeatureWorkspaceApp", + "CryptoKeyFeatureWorkspaceAppsAPIKey", + "CryptoKeyFeatureWorkspaceAppsToken", "CryptoKeyFeatureOIDCConvert", "CryptoKeyFeatureTailnetResume" ] @@ -14853,9 +14881,6 @@ "wsproxysdk.RegisterWorkspaceProxyResponse": { "type": "object", "properties": { - "app_security_key": { - "type": "string" - }, "derp_force_websockets": { "type": "boolean" }, @@ -14890,6 +14915,11 @@ } }, "securityDefinitions": { + "Authorization": { + "type": "apiKey", + "name": "Authorizaiton", + "in": "header" + }, "CoderSessionToken": { "type": "apiKey", "name": "Coder-Session-Token", diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index 400f0406aee0e..ac2930c9e32c8 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -10,6 +10,8 @@ import ( "github.com/dustin/go-humanize" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" @@ -39,6 +41,13 @@ type Executor struct { statsCh chan<- Stats // NotificationsEnqueuer handles enqueueing notifications for delivery by SMTP, webhook, etc. notificationsEnqueuer notifications.Enqueuer + reg prometheus.Registerer + + metrics executorMetrics +} + +type executorMetrics struct { + autobuildExecutionDuration prometheus.Histogram } // Stats contains information about one run of Executor. 
@@ -49,7 +58,8 @@ type Stats struct { } // New returns a new wsactions executor. -func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor { +func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor { + factory := promauto.With(reg) le := &Executor{ //nolint:gocritic // Autostart has a limited set of permissions. ctx: dbauthz.AsAutostart(ctx), @@ -61,6 +71,16 @@ func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, tss * auditor: auditor, accessControlStore: acs, notificationsEnqueuer: enqueuer, + reg: reg, + metrics: executorMetrics{ + autobuildExecutionDuration: factory.NewHistogram(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "lifecycle", + Name: "autobuild_execution_duration_seconds", + Help: "Duration of each autobuild execution.", + Buckets: prometheus.DefBuckets, + }), + }, } return le } @@ -86,6 +106,7 @@ func (e *Executor) Run() { return } stats := e.runOnce(t) + e.metrics.autobuildExecutionDuration.Observe(stats.Elapsed.Seconds()) if e.statsCh != nil { select { case <-e.ctx.Done(): @@ -285,7 +306,10 @@ func (e *Executor) runOnce(t time.Time) Stats { // Run with RepeatableRead isolation so that the build process sees the same data // as our calculation that determines whether an autobuild is necessary. 
- }, &sql.TxOptions{Isolation: sql.LevelRepeatableRead}) + }, &database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + TxIdentifier: "lifecycle", + }) if auditLog != nil { // If the transition didn't succeed then updating the workspace // to indicate dormant didn't either. diff --git a/coderd/coderd.go b/coderd/coderd.go index cb0884808ef27..bd844d7ca13c3 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -40,6 +40,7 @@ import ( "github.com/coder/quartz" "github.com/coder/serpent" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/entitlements" "github.com/coder/coder/v2/coderd/idpsync" "github.com/coder/coder/v2/coderd/runtimeconfig" @@ -185,9 +186,6 @@ type Options struct { TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] UserQuietHoursScheduleStore *atomic.Pointer[schedule.UserQuietHoursScheduleStore] AccessControlStore *atomic.Pointer[dbauthz.AccessControlStore] - // AppSecurityKey is the crypto key used to sign and encrypt tokens related to - // workspace applications. It consists of both a signing and encryption key. - AppSecurityKey workspaceapps.SecurityKey // CoordinatorResumeTokenProvider is used to provide and validate resume // tokens issued by and passed to the coordinator DRPC API. CoordinatorResumeTokenProvider tailnet.ResumeTokenProvider @@ -251,6 +249,12 @@ type Options struct { // OneTimePasscodeValidityPeriod specifies how long a one time passcode should be valid for. 
OneTimePasscodeValidityPeriod time.Duration + + // Keycaches + AppSigningKeyCache cryptokeys.SigningKeycache + AppEncryptionKeyCache cryptokeys.EncryptionKeycache + OIDCConvertKeyCache cryptokeys.SigningKeycache + Clock quartz.Clock } // @title Coder API @@ -267,6 +271,10 @@ type Options struct { // @BasePath /api/v2 +// @securitydefinitions.apiKey Authorization +// @in header +// @name Authorizaiton + // @securitydefinitions.apiKey CoderSessionToken // @in header // @name Coder-Session-Token @@ -352,6 +360,9 @@ func New(options *Options) *API { if options.PrometheusRegistry == nil { options.PrometheusRegistry = prometheus.NewRegistry() } + if options.Clock == nil { + options.Clock = quartz.NewReal() + } if options.DERPServer == nil && options.DeploymentValues.DERP.Server.Enable { options.DERPServer = derp.NewServer(key.NewNode(), tailnet.Logger(options.Logger.Named("derp"))) } @@ -444,6 +455,49 @@ func New(options *Options) *API { if err != nil { panic(xerrors.Errorf("get deployment ID: %w", err)) } + + fetcher := &cryptokeys.DBFetcher{ + DB: options.Database, + } + + if options.OIDCConvertKeyCache == nil { + options.OIDCConvertKeyCache, err = cryptokeys.NewSigningCache(ctx, + options.Logger.Named("oidc_convert_keycache"), + fetcher, + codersdk.CryptoKeyFeatureOIDCConvert, + ) + if err != nil { + options.Logger.Critical(ctx, "failed to properly instantiate oidc convert signing cache", slog.Error(err)) + } + } + + if options.AppSigningKeyCache == nil { + options.AppSigningKeyCache, err = cryptokeys.NewSigningCache(ctx, + options.Logger.Named("app_signing_keycache"), + fetcher, + codersdk.CryptoKeyFeatureWorkspaceAppsToken, + ) + if err != nil { + options.Logger.Critical(ctx, "failed to properly instantiate app signing key cache", slog.Error(err)) + } + } + + if options.AppEncryptionKeyCache == nil { + options.AppEncryptionKeyCache, err = cryptokeys.NewEncryptionCache(ctx, + options.Logger, + fetcher, + codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey, + ) + if err != 
nil { + options.Logger.Critical(ctx, "failed to properly instantiate app encryption key cache", slog.Error(err)) + } + } + + // Start a background process that rotates keys. We intentionally start this after the caches + // are created to force initial requests for a key to populate the caches. This helps catch + // bugs that may only occur when a key isn't precached in tests and the latency cost is minimal. + cryptokeys.StartRotator(ctx, options.Logger, options.Database) + api := &API{ ctx: ctx, cancel: cancel, @@ -464,7 +518,7 @@ func New(options *Options) *API { options.DeploymentValues, oauthConfigs, options.AgentInactiveDisconnectTimeout, - options.AppSecurityKey, + options.AppSigningKeyCache, ), metricsCache: metricsCache, Auditor: atomic.Pointer[audit.Auditor]{}, @@ -606,7 +660,7 @@ func New(options *Options) *API { ResumeTokenProvider: api.Options.CoordinatorResumeTokenProvider, }) if err != nil { - api.Logger.Fatal(api.ctx, "failed to initialize tailnet client service", slog.Error(err)) + api.Logger.Fatal(context.Background(), "failed to initialize tailnet client service", slog.Error(err)) } api.statsReporter = workspacestats.NewReporter(workspacestats.ReporterOptions{ @@ -628,9 +682,6 @@ func New(options *Options) *API { options.WorkspaceAppsStatsCollectorOptions.Reporter = api.statsReporter } - if options.AppSecurityKey.IsZero() { - api.Logger.Fatal(api.ctx, "app security key cannot be zero") - } api.workspaceAppServer = &workspaceapps.Server{ Logger: workspaceAppsLogger, @@ -642,11 +693,11 @@ func New(options *Options) *API { SignedTokenProvider: api.WorkspaceAppsProvider, AgentProvider: api.agentProvider, - AppSecurityKey: options.AppSecurityKey, StatsCollector: workspaceapps.NewStatsCollector(options.WorkspaceAppsStatsCollectorOptions), - DisablePathApps: options.DeploymentValues.DisablePathApps.Value(), - SecureAuthCookie: options.DeploymentValues.SecureAuthCookie.Value(), + DisablePathApps: options.DeploymentValues.DisablePathApps.Value(), + 
SecureAuthCookie: options.DeploymentValues.SecureAuthCookie.Value(), + APIKeyEncryptionKeycache: options.AppEncryptionKeyCache, } apiKeyMiddleware := httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ @@ -1434,6 +1485,9 @@ func (api *API) Close() error { _ = api.agentProvider.Close() _ = api.statsReporter.Close() _ = api.NetworkTelemetryBatcher.Close() + _ = api.OIDCConvertKeyCache.Close() + _ = api.AppSigningKeyCache.Close() + _ = api.AppEncryptionKeyCache.Close() return nil } diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 05c31f35bd20a..e287e04b8d0cf 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -55,6 +55,7 @@ import ( "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/autobuild" "github.com/coder/coder/v2/coderd/awsidentity" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/database/dbauthz" @@ -88,12 +89,9 @@ import ( sdkproto "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) -// AppSecurityKey is a 96-byte key used to sign JWTs and encrypt JWEs for -// workspace app tokens in tests. -var AppSecurityKey = must(workspaceapps.KeyFromString("6465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e2077617320686572")) - type Options struct { // AccessURL denotes a custom access URL. By default we use the httptest // server's URL. 
Setting this may result in unexpected behavior (especially @@ -161,8 +159,10 @@ type Options struct { DatabaseRolluper *dbrollup.Rolluper WorkspaceUsageTrackerFlush chan int WorkspaceUsageTrackerTick chan time.Time - - NotificationsEnqueuer notifications.Enqueuer + NotificationsEnqueuer notifications.Enqueuer + APIKeyEncryptionCache cryptokeys.EncryptionKeycache + OIDCConvertKeyCache cryptokeys.SigningKeycache + Clock quartz.Clock } // New constructs a codersdk client connected to an in-memory API instance. @@ -335,6 +335,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can ctx, options.Database, options.Pubsub, + prometheus.NewRegistry(), &templateScheduleStore, &auditor, accessControlStore, @@ -525,7 +526,6 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can DeploymentOptions: codersdk.DeploymentOptionsWithoutSecrets(options.DeploymentValues.Options()), UpdateCheckOptions: options.UpdateCheckOptions, SwaggerEndpoint: options.SwaggerEndpoint, - AppSecurityKey: AppSecurityKey, SSHConfig: options.ConfigSSH, HealthcheckFunc: options.HealthcheckFunc, HealthcheckTimeout: options.HealthcheckTimeout, @@ -538,6 +538,9 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can WorkspaceUsageTracker: wuTracker, NotificationsEnqueuer: options.NotificationsEnqueuer, OneTimePasscodeValidityPeriod: options.OneTimePasscodeValidityPeriod, + Clock: options.Clock, + AppEncryptionKeyCache: options.APIKeyEncryptionCache, + OIDCConvertKeyCache: options.OIDCConvertKeyCache, } } @@ -651,6 +654,16 @@ var FirstUserParams = codersdk.CreateFirstUserRequest{ Name: "Test User", } +var TrialUserParams = codersdk.CreateFirstUserTrialInfo{ + FirstName: "John", + LastName: "Doe", + PhoneNumber: "9999999999", + JobTitle: "Engineer", + CompanyName: "Acme Inc", + Country: "United States", + Developers: "10-50", +} + // CreateFirstUser creates a user with preset credentials and authenticates // with the passed 
in codersdk client. func CreateFirstUser(t testing.TB, client *codersdk.Client) codersdk.CreateFirstUserResponse { diff --git a/coderd/coderdtest/promhelp/doc.go b/coderd/coderdtest/promhelp/doc.go new file mode 100644 index 0000000000000..48b7e4b5aa550 --- /dev/null +++ b/coderd/coderdtest/promhelp/doc.go @@ -0,0 +1,3 @@ +// Package promhelp provides helper functions for asserting Prometheus +// metric values in unit tests. +package promhelp diff --git a/coderd/coderdtest/promhelp/metrics.go b/coderd/coderdtest/promhelp/metrics.go new file mode 100644 index 0000000000000..39c8af6ef9561 --- /dev/null +++ b/coderd/coderdtest/promhelp/metrics.go @@ -0,0 +1,87 @@ +package promhelp + +import ( + "context" + "io" + "maps" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + ptestutil "github.com/prometheus/client_golang/prometheus/testutil" + io_prometheus_client "github.com/prometheus/client_model/go" + "github.com/stretchr/testify/require" +) + +// RegistryDump returns the http page for a given registry's metrics. +// Very useful for visual debugging. +func RegistryDump(reg *prometheus.Registry) string { + h := promhttp.HandlerFor(reg, promhttp.HandlerOpts{}) + rec := httptest.NewRecorder() + req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "/", nil) + h.ServeHTTP(rec, req) + resp := rec.Result() + data, _ := io.ReadAll(resp.Body) + _ = resp.Body.Close() + return string(data) +} + +// Compare can be used to compare a registry to some prometheus formatted +// text. If any values differ, an error is returned. +// If metric names are passed in, only those metrics will be compared. +// Usage: `Compare(reg, RegistryDump(reg))` +func Compare(reg prometheus.Gatherer, compare string, metricNames ...string) error { + return ptestutil.GatherAndCompare(reg, strings.NewReader(compare), metricNames...) 
+} + +// HistogramValue returns the value of a histogram metric with the given name and labels. +func HistogramValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Histogram { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return labeled.GetHistogram() +} + +// GaugeValue returns the value of a gauge metric with the given name and labels. +func GaugeValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return int(labeled.GetGauge().GetValue()) +} + +// CounterValue returns the value of a counter metric with the given name and labels. +func CounterValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return int(labeled.GetCounter().GetValue()) +} + +func MetricValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Metric { + t.Helper() + + metrics, err := reg.Gather() + require.NoError(t, err) + + for _, m := range metrics { + if m.GetName() == metricName { + for _, labeled := range m.GetMetric() { + mLabels := make(prometheus.Labels) + for _, v := range labeled.GetLabel() { + mLabels[v.GetName()] = v.GetValue() + } + if maps.Equal(mLabels, labels) { + return labeled + } + } + } + } + return nil +} diff --git a/coderd/cryptokeys/cache.go b/coderd/cryptokeys/cache.go index 74fb025d416fd..43d673548ce06 100644 --- a/coderd/cryptokeys/cache.go +++ b/coderd/cryptokeys/cache.go @@ -3,6 +3,7 @@ package cryptokeys import ( "context" "encoding/hex" + "fmt" 
"io" "strconv" "sync" @@ -12,7 +13,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/codersdk" "github.com/coder/quartz" ) @@ -25,7 +26,7 @@ var ( ) type Fetcher interface { - Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) + Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) } type EncryptionKeycache interface { @@ -62,27 +63,26 @@ const ( ) type DBFetcher struct { - DB database.Store - Feature database.CryptoKeyFeature + DB database.Store } -func (d *DBFetcher) Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) { - keys, err := d.DB.GetCryptoKeysByFeature(ctx, d.Feature) +func (d *DBFetcher) Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { + keys, err := d.DB.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeature(feature)) if err != nil { return nil, xerrors.Errorf("get crypto keys by feature: %w", err) } - return db2sdk.CryptoKeys(keys), nil + return toSDKKeys(keys), nil } // cache implements the caching functionality for both signing and encryption keys. type cache struct { - clock quartz.Clock - refreshCtx context.Context - refreshCancel context.CancelFunc - fetcher Fetcher - logger slog.Logger - feature codersdk.CryptoKeyFeature + ctx context.Context + cancel context.CancelFunc + clock quartz.Clock + fetcher Fetcher + logger slog.Logger + feature codersdk.CryptoKeyFeature mu sync.Mutex keys map[int32]codersdk.CryptoKey @@ -109,7 +109,8 @@ func NewSigningCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, if !isSigningKeyFeature(feature) { return nil, xerrors.Errorf("invalid feature: %s", feature) } - return newCache(ctx, logger, fetcher, feature, opts...) 
+ logger = logger.Named(fmt.Sprintf("%s_signing_keycache", feature)) + return newCache(ctx, logger, fetcher, feature, opts...), nil } func NewEncryptionCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, @@ -118,10 +119,11 @@ func NewEncryptionCache(ctx context.Context, logger slog.Logger, fetcher Fetcher if !isEncryptionKeyFeature(feature) { return nil, xerrors.Errorf("invalid feature: %s", feature) } - return newCache(ctx, logger, fetcher, feature, opts...) + logger = logger.Named(fmt.Sprintf("%s_encryption_keycache", feature)) + return newCache(ctx, logger, fetcher, feature, opts...), nil } -func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature codersdk.CryptoKeyFeature, opts ...func(*cache)) (*cache, error) { +func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature codersdk.CryptoKeyFeature, opts ...func(*cache)) *cache { cache := &cache{ clock: quartz.NewReal(), logger: logger, @@ -134,16 +136,16 @@ func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature } cache.cond = sync.NewCond(&cache.mu) - cache.refreshCtx, cache.refreshCancel = context.WithCancel(ctx) + //nolint:gocritic // We need to be able to read the keys in order to cache them. + cache.ctx, cache.cancel = context.WithCancel(dbauthz.AsKeyReader(ctx)) cache.refresher = cache.clock.AfterFunc(refreshInterval, cache.refresh) - keys, err := cache.cryptoKeys(ctx) + keys, err := cache.cryptoKeys(cache.ctx) if err != nil { - cache.refreshCancel() - return nil, xerrors.Errorf("initial fetch: %w", err) + cache.logger.Critical(cache.ctx, "failed initial fetch", slog.Error(err)) } cache.keys = keys - return cache, nil + return cache } func (c *cache) EncryptingKey(ctx context.Context) (string, interface{}, error) { @@ -151,6 +153,8 @@ func (c *cache) EncryptingKey(ctx context.Context) (string, interface{}, error) return "", nil, ErrInvalidFeature } + //nolint:gocritic // cache can only read crypto keys. 
+ ctx = dbauthz.AsKeyReader(ctx) return c.cryptoKey(ctx, latestSequence) } @@ -159,11 +163,13 @@ func (c *cache) DecryptingKey(ctx context.Context, id string) (interface{}, erro return nil, ErrInvalidFeature } - seq, err := strconv.ParseInt(id, 10, 64) + seq, err := strconv.ParseInt(id, 10, 32) if err != nil { return nil, xerrors.Errorf("parse id: %w", err) } + //nolint:gocritic // cache can only read crypto keys. + ctx = dbauthz.AsKeyReader(ctx) _, secret, err := c.cryptoKey(ctx, int32(seq)) if err != nil { return nil, xerrors.Errorf("crypto key: %w", err) @@ -176,6 +182,8 @@ func (c *cache) SigningKey(ctx context.Context) (string, interface{}, error) { return "", nil, ErrInvalidFeature } + //nolint:gocritic // cache can only read crypto keys. + ctx = dbauthz.AsKeyReader(ctx) return c.cryptoKey(ctx, latestSequence) } @@ -184,11 +192,12 @@ func (c *cache) VerifyingKey(ctx context.Context, id string) (interface{}, error return nil, ErrInvalidFeature } - seq, err := strconv.ParseInt(id, 10, 64) + seq, err := strconv.ParseInt(id, 10, 32) if err != nil { return nil, xerrors.Errorf("parse id: %w", err) } - + //nolint:gocritic // cache can only read crypto keys. 
+ ctx = dbauthz.AsKeyReader(ctx) _, secret, err := c.cryptoKey(ctx, int32(seq)) if err != nil { return nil, xerrors.Errorf("crypto key: %w", err) @@ -198,12 +207,12 @@ func (c *cache) VerifyingKey(ctx context.Context, id string) (interface{}, error } func isEncryptionKeyFeature(feature codersdk.CryptoKeyFeature) bool { - return feature == codersdk.CryptoKeyFeatureWorkspaceApp + return feature == codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey } func isSigningKeyFeature(feature codersdk.CryptoKeyFeature) bool { switch feature { - case codersdk.CryptoKeyFeatureTailnetResume, codersdk.CryptoKeyFeatureOIDCConvert: + case codersdk.CryptoKeyFeatureTailnetResume, codersdk.CryptoKeyFeatureOIDCConvert, codersdk.CryptoKeyFeatureWorkspaceAppsToken: return true default: return false @@ -292,14 +301,15 @@ func checkKey(key codersdk.CryptoKey, sequence int32, now time.Time) (string, [] func (c *cache) refresh() { now := c.clock.Now("CryptoKeyCache", "refresh") c.mu.Lock() - defer c.mu.Unlock() if c.closed { + c.mu.Unlock() return } // If something's already fetching, we don't need to do anything. if c.fetching { + c.mu.Unlock() return } @@ -307,20 +317,21 @@ func (c *cache) refresh() { // is ongoing but prior to the timer getting reset. In this case we want to // avoid double fetching. if now.Sub(c.lastFetch) < refreshInterval { + c.mu.Unlock() return } c.fetching = true c.mu.Unlock() - keys, err := c.cryptoKeys(c.refreshCtx) + keys, err := c.cryptoKeys(c.ctx) if err != nil { - c.logger.Error(c.refreshCtx, "fetch crypto keys", slog.Error(err)) + c.logger.Error(c.ctx, "fetch crypto keys", slog.Error(err)) return } - // We don't defer an unlock here due to the deferred unlock at the top of the function. c.mu.Lock() + defer c.mu.Unlock() c.lastFetch = c.clock.Now() c.refresher.Reset(refreshInterval) @@ -332,9 +343,9 @@ func (c *cache) refresh() { // cryptoKeys queries the control plane for the crypto keys. // Outside of initialization, this should only be called by fetch. 
func (c *cache) cryptoKeys(ctx context.Context) (map[int32]codersdk.CryptoKey, error) { - keys, err := c.fetcher.Fetch(ctx) + keys, err := c.fetcher.Fetch(ctx, c.feature) if err != nil { - return nil, xerrors.Errorf("crypto keys: %w", err) + return nil, xerrors.Errorf("fetch: %w", err) } cache := toKeyMap(keys, c.clock.Now()) return cache, nil @@ -361,9 +372,28 @@ func (c *cache) Close() error { } c.closed = true - c.refreshCancel() + c.cancel() c.refresher.Stop() c.cond.Broadcast() return nil } + +// We have to do this to avoid a circular dependency on db2sdk (cryptokeys -> db2sdk -> tailnet -> cryptokeys) +func toSDKKeys(keys []database.CryptoKey) []codersdk.CryptoKey { + into := make([]codersdk.CryptoKey, 0, len(keys)) + for _, key := range keys { + into = append(into, toSDK(key)) + } + return into +} + +func toSDK(key database.CryptoKey) codersdk.CryptoKey { + return codersdk.CryptoKey{ + Feature: codersdk.CryptoKeyFeature(key.Feature), + Sequence: key.Sequence, + StartsAt: key.StartsAt, + DeletesAt: key.DeletesAt.Time, + Secret: key.Secret.String, + } +} diff --git a/coderd/cryptokeys/cache_test.go b/coderd/cryptokeys/cache_test.go index 92fc4527ae7b3..cda87315605a4 100644 --- a/coderd/cryptokeys/cache_test.go +++ b/coderd/cryptokeys/cache_test.go @@ -488,7 +488,7 @@ type fakeFetcher struct { called int } -func (f *fakeFetcher) Fetch(_ context.Context) ([]codersdk.CryptoKey, error) { +func (f *fakeFetcher) Fetch(_ context.Context, _ codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { f.called++ return f.keys, nil } diff --git a/coderd/cryptokeys/rotate.go b/coderd/cryptokeys/rotate.go index 14a623e2156db..26256b4cd4c12 100644 --- a/coderd/cryptokeys/rotate.go +++ b/coderd/cryptokeys/rotate.go @@ -11,6 +11,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/quartz" ) @@ -53,10 +54,12 @@ func 
WithKeyDuration(keyDuration time.Duration) RotatorOption { // StartRotator starts a background process that rotates keys in the database. // It ensures there's at least one valid key per feature prior to returning. // Canceling the provided context will stop the background process. -func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, opts ...RotatorOption) error { +func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, opts ...RotatorOption) { + //nolint:gocritic // KeyRotator can only rotate crypto keys. + ctx = dbauthz.AsKeyRotator(ctx) kr := &rotator{ db: db, - logger: logger, + logger: logger.Named("keyrotator"), clock: quartz.NewReal(), keyDuration: DefaultKeyDuration, features: database.AllCryptoKeyFeatureValues(), @@ -68,12 +71,10 @@ func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, op err := kr.rotateKeys(ctx) if err != nil { - return xerrors.Errorf("rotate keys: %w", err) + kr.logger.Critical(ctx, "failed to rotate keys", slog.Error(err)) } go kr.start(ctx) - - return nil } // start begins the process of rotating keys. 
@@ -161,8 +162,9 @@ func (k *rotator) rotateKeys(ctx context.Context) error { } } return nil - }, &sql.TxOptions{ - Isolation: sql.LevelRepeatableRead, + }, &database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + TxIdentifier: "rotate_keys", }) } @@ -226,9 +228,11 @@ func (k *rotator) rotateKey(ctx context.Context, tx database.Store, key database func generateNewSecret(feature database.CryptoKeyFeature) (string, error) { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: return generateKey(32) - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureWorkspaceAppsToken: + return generateKey(64) + case database.CryptoKeyFeatureOIDCConvert: return generateKey(64) case database.CryptoKeyFeatureTailnetResume: return generateKey(64) @@ -247,9 +251,11 @@ func generateKey(length int) (string, error) { func tokenDuration(feature database.CryptoKeyFeature) time.Duration { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: + return WorkspaceAppsTokenDuration + case database.CryptoKeyFeatureWorkspaceAppsToken: return WorkspaceAppsTokenDuration - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureOIDCConvert: return OIDCConvertTokenDuration case database.CryptoKeyFeatureTailnetResume: return TailnetResumeTokenDuration diff --git a/coderd/cryptokeys/rotate_internal_test.go b/coderd/cryptokeys/rotate_internal_test.go index 43754c1d8750f..e427a3c6216ac 100644 --- a/coderd/cryptokeys/rotate_internal_test.go +++ b/coderd/cryptokeys/rotate_internal_test.go @@ -38,7 +38,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -46,7 +46,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key. 
oldKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 15, }) @@ -69,11 +69,11 @@ func Test_rotateKeys(t *testing.T) { // The new key should be created and have a starts_at of the old key's expires_at. newKey, err := db.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: oldKey.Sequence + 1, }) require.NoError(t, err) - requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceApps, oldKey.ExpiresAt(keyDuration), nullTime, oldKey.Sequence+1) + requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, oldKey.ExpiresAt(keyDuration), nullTime, oldKey.Sequence+1) // Advance the clock just before the keys delete time. clock.Advance(oldKey.DeletesAt.Time.UTC().Sub(now) - time.Second) @@ -123,7 +123,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -131,7 +131,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key existingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 123, }) @@ -179,7 +179,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -187,7 +187,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key deletingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: 
now.Add(-keyDuration), Sequence: 789, DeletesAt: sql.NullTime{ @@ -232,7 +232,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -240,7 +240,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key deletingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 456, DeletesAt: sql.NullTime{ @@ -281,7 +281,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -291,7 +291,7 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) require.Len(t, keys, 1) - requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceApps, clock.Now().UTC(), nullTime, 1) + requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceAppsAPIKey, clock.Now().UTC(), nullTime, 1) }) // Assert we insert a new key when the only key was manually deleted. 
@@ -312,14 +312,14 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } now := dbnow(clock) deletedkey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 19, DeletesAt: sql.NullTime{ @@ -338,7 +338,7 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) require.Len(t, keys, 1) - requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceApps, now, nullTime, deletedkey.Sequence+1) + requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceAppsAPIKey, now, nullTime, deletedkey.Sequence+1) }) // This tests ensures that rotation works with multiple @@ -365,9 +365,11 @@ func Test_rotateKeys(t *testing.T) { now := dbnow(clock) - // We'll test a scenario where one feature has no valid keys. - // Another has a key that should be rotate. And one that - // has a valid key that shouldn't trigger an action. + // We'll test a scenario where: + // - One feature has no valid keys. + // - One has a key that should be rotated. + // - One has a valid key that shouldn't trigger an action. + // - One has no keys at all. _ = dbgen.CryptoKey(t, db, database.CryptoKey{ Feature: database.CryptoKeyFeatureTailnetResume, StartsAt: now.Add(-keyDuration), @@ -377,6 +379,7 @@ func Test_rotateKeys(t *testing.T) { Valid: false, }, }) + // Generate another deleted key to ensure we insert after the latest sequence. deletedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ Feature: database.CryptoKeyFeatureTailnetResume, StartsAt: now.Add(-keyDuration), @@ -389,14 +392,14 @@ func Test_rotateKeys(t *testing.T) { // Insert a key that should be rotated. 
rotatedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration + time.Hour), Sequence: 42, }) // Insert a key that should not trigger an action. validKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, + Feature: database.CryptoKeyFeatureOIDCConvert, StartsAt: now, Sequence: 17, }) @@ -406,26 +409,28 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) - require.Len(t, keys, 4) + require.Len(t, keys, 5) kbf, err := keysByFeature(keys, database.AllCryptoKeyFeatureValues()) require.NoError(t, err) // No actions on OIDC convert. - require.Len(t, kbf[database.CryptoKeyFeatureOidcConvert], 1) + require.Len(t, kbf[database.CryptoKeyFeatureOIDCConvert], 1) // Workspace apps should have been rotated. - require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceApps], 2) + require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey], 2) // No existing key for tailnet resume should've // caused a key to be inserted. 
require.Len(t, kbf[database.CryptoKeyFeatureTailnetResume], 1) + require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceAppsToken], 1) - oidcKey := kbf[database.CryptoKeyFeatureOidcConvert][0] + oidcKey := kbf[database.CryptoKeyFeatureOIDCConvert][0] tailnetKey := kbf[database.CryptoKeyFeatureTailnetResume][0] - requireKey(t, oidcKey, database.CryptoKeyFeatureOidcConvert, now, nullTime, validKey.Sequence) + appTokenKey := kbf[database.CryptoKeyFeatureWorkspaceAppsToken][0] + requireKey(t, oidcKey, database.CryptoKeyFeatureOIDCConvert, now, nullTime, validKey.Sequence) requireKey(t, tailnetKey, database.CryptoKeyFeatureTailnetResume, now, nullTime, deletedKey.Sequence+1) - - newKey := kbf[database.CryptoKeyFeatureWorkspaceApps][0] - oldKey := kbf[database.CryptoKeyFeatureWorkspaceApps][1] + requireKey(t, appTokenKey, database.CryptoKeyFeatureWorkspaceAppsToken, now, nullTime, 1) + newKey := kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey][0] + oldKey := kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey][1] if newKey.Sequence == rotatedKey.Sequence { oldKey, newKey = newKey, oldKey } @@ -433,8 +438,8 @@ func Test_rotateKeys(t *testing.T) { Time: rotatedKey.ExpiresAt(keyDuration).Add(WorkspaceAppsTokenDuration + time.Hour), Valid: true, } - requireKey(t, oldKey, database.CryptoKeyFeatureWorkspaceApps, rotatedKey.StartsAt.UTC(), deletesAt, rotatedKey.Sequence) - requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceApps, rotatedKey.ExpiresAt(keyDuration), nullTime, rotatedKey.Sequence+1) + requireKey(t, oldKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, rotatedKey.StartsAt.UTC(), deletesAt, rotatedKey.Sequence) + requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, rotatedKey.ExpiresAt(keyDuration), nullTime, rotatedKey.Sequence+1) }) t.Run("UnknownFeature", func(t *testing.T) { @@ -478,11 +483,11 @@ func Test_rotateKeys(t *testing.T) { keyDuration: keyDuration, clock: clock, logger: logger, - features: 
[]database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceApps}, + features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceAppsAPIKey}, } expiringKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration), Sequence: 345, }) @@ -522,19 +527,19 @@ func Test_rotateKeys(t *testing.T) { keyDuration: keyDuration, clock: clock, logger: logger, - features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceApps}, + features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceAppsAPIKey}, } now := dbnow(clock) expiredKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration - 2*time.Hour), Sequence: 19, }) deletedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 20, Secret: sql.NullString{ @@ -587,9 +592,11 @@ func requireKey(t *testing.T, key database.CryptoKey, feature database.CryptoKey require.NoError(t, err) switch key.Feature { - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureOIDCConvert: + require.Len(t, secret, 64) + case database.CryptoKeyFeatureWorkspaceAppsToken: require.Len(t, secret, 64) - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: require.Len(t, secret, 32) case database.CryptoKeyFeatureTailnetResume: require.Len(t, secret, 64) diff --git a/coderd/cryptokeys/rotate_test.go b/coderd/cryptokeys/rotate_test.go index 190ad213b1153..9e147c8f921f0 100644 --- a/coderd/cryptokeys/rotate_test.go +++ b/coderd/cryptokeys/rotate_test.go @@ -34,8 +34,7 @@ func TestRotator(t *testing.T) { require.NoError(t, err) require.Len(t, dbkeys, 0) - err = 
cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) - require.NoError(t, err) + cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) // Fetch the keys from the database and ensure they // are as expected. @@ -58,7 +57,7 @@ func TestRotator(t *testing.T) { now := clock.Now().UTC() rotatingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-cryptokeys.DefaultKeyDuration + time.Hour + time.Minute), Sequence: 12345, }) @@ -66,8 +65,7 @@ func TestRotator(t *testing.T) { trap := clock.Trap().TickerFunc() t.Cleanup(trap.Close) - err := cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) - require.NoError(t, err) + cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) initialKeyLen := len(database.AllCryptoKeyFeatureValues()) // Fetch the keys from the database and ensure they @@ -85,7 +83,7 @@ func TestRotator(t *testing.T) { require.NoError(t, err) require.Len(t, keys, initialKeyLen+1) - newKey, err := db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + newKey, err := db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.Equal(t, rotatingKey.Sequence+1, newKey.Sequence) require.Equal(t, rotatingKey.ExpiresAt(cryptokeys.DefaultKeyDuration), newKey.StartsAt.UTC()) diff --git a/coderd/database/db.go b/coderd/database/db.go index 51e61e4ce2027..ae2c31a566cb3 100644 --- a/coderd/database/db.go +++ b/coderd/database/db.go @@ -28,7 +28,7 @@ type Store interface { wrapper Ping(ctx context.Context) (time.Duration, error) - InTx(func(Store) error, *sql.TxOptions) error + InTx(func(Store) error, *TxOptions) error } type wrapper interface { @@ -57,6 +57,43 @@ func New(sdb *sql.DB) Store { } } +// TxOptions is used to pass some execution metadata to the callers. 
+// Ideally we could throw this into a context, but no context is used for +// transactions. So instead, the return context is attached to the options +// passed in. +// This metadata should not be returned in the method signature, because it +// is only used for metric tracking. It should never be used by business logic. +type TxOptions struct { + // Isolation is the transaction isolation level. + // If zero, the driver or database's default level is used. + Isolation sql.IsolationLevel + ReadOnly bool + + // -- Coder specific metadata -- + // TxIdentifier is a unique identifier for the transaction to be used + // in metrics. Can be any string. + TxIdentifier string + + // Set by InTx + executionCount int +} + +// IncrementExecutionCount is a helper function for external packages +// to increment the unexported count. +// Mainly for `dbmem`. +func IncrementExecutionCount(opts *TxOptions) { + opts.executionCount++ +} + +func (o TxOptions) ExecutionCount() int { + return o.executionCount +} + +func (o *TxOptions) WithID(id string) *TxOptions { + o.TxIdentifier = id + return o +} + // queries encompasses both are sqlc generated // queries and our custom queries. 
type querier interface { @@ -80,11 +117,24 @@ func (q *sqlQuerier) Ping(ctx context.Context) (time.Duration, error) { return time.Since(start), err } -func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) error { +func DefaultTXOptions() *TxOptions { + return &TxOptions{ + Isolation: sql.LevelDefault, + ReadOnly: false, + } +} + +func (q *sqlQuerier) InTx(function func(Store) error, txOpts *TxOptions) error { _, inTx := q.db.(*sqlx.Tx) - isolation := sql.LevelDefault - if txOpts != nil { - isolation = txOpts.Isolation + + if txOpts == nil { + // create a default txOpts if left to nil + txOpts = DefaultTXOptions() + } + + sqlOpts := &sql.TxOptions{ + Isolation: txOpts.Isolation, + ReadOnly: txOpts.ReadOnly, } // If we are not already in a transaction, and we are running in serializable @@ -92,13 +142,14 @@ func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) err // prepared to allow retries if using serializable mode. // If we are in a transaction already, the parent InTx call will handle the retry. // We do not want to duplicate those retries. - if !inTx && isolation == sql.LevelSerializable { + if !inTx && sqlOpts.Isolation == sql.LevelSerializable { // This is an arbitrarily chosen number. const retryAmount = 3 var err error attempts := 0 for attempts = 0; attempts < retryAmount; attempts++ { - err = q.runTx(function, txOpts) + txOpts.executionCount++ + err = q.runTx(function, sqlOpts) if err == nil { // Transaction succeeded. return nil @@ -111,7 +162,9 @@ func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) err // Transaction kept failing in serializable mode. return xerrors.Errorf("transaction failed after %d attempts: %w", attempts, err) } - return q.runTx(function, txOpts) + + txOpts.executionCount++ + return q.runTx(function, sqlOpts) } // InTx performs database operations inside a transaction. 
diff --git a/coderd/database/db_test.go b/coderd/database/db_test.go index db7fe41eea3dc..a6df18fcbb8c8 100644 --- a/coderd/database/db_test.go +++ b/coderd/database/db_test.go @@ -27,7 +27,7 @@ func TestSerializedRetry(t *testing.T) { db := database.New(sqlDB) called := 0 - txOpts := &sql.TxOptions{Isolation: sql.LevelSerializable} + txOpts := &database.TxOptions{Isolation: sql.LevelSerializable} err := db.InTx(func(tx database.Store) error { // Test nested error return tx.InTx(func(tx database.Store) error { diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 052f25450e6a5..ae6b307b3e7d3 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -228,6 +228,42 @@ var ( Scope: rbac.ScopeAll, }.WithCachedASTValue() + // See cryptokeys package. + subjectCryptoKeyRotator = rbac.Subject{ + FriendlyName: "Crypto Key Rotator", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "keyrotator"}, + DisplayName: "Key Rotator", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, + }), + Org: map[string][]rbac.Permission{}, + User: []rbac.Permission{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() + + // See cryptokeys package. 
+ subjectCryptoKeyReader = rbac.Subject{ + FriendlyName: "Crypto Key Reader", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "keyrotator"}, + DisplayName: "Key Rotator", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, + }), + Org: map[string][]rbac.Permission{}, + User: []rbac.Permission{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() + subjectSystemRestricted = rbac.Subject{ FriendlyName: "System", ID: uuid.Nil.String(), @@ -281,6 +317,16 @@ func AsHangDetector(ctx context.Context) context.Context { return context.WithValue(ctx, authContextKey{}, subjectHangDetector) } +// AsKeyRotator returns a context with an actor that has permissions required for rotating crypto keys. +func AsKeyRotator(ctx context.Context) context.Context { + return context.WithValue(ctx, authContextKey{}, subjectCryptoKeyRotator) +} + +// AsKeyReader returns a context with an actor that has permissions required for reading crypto keys. +func AsKeyReader(ctx context.Context) context.Context { + return context.WithValue(ctx, authContextKey{}, subjectCryptoKeyReader) +} + // AsSystemRestricted returns a context with an actor that has permissions // required for various system operations (login, logout, metrics cache). func AsSystemRestricted(ctx context.Context) context.Context { @@ -558,7 +604,7 @@ func (q *querier) Ping(ctx context.Context) (time.Duration, error) { } // InTx runs the given function in a transaction. -func (q *querier) InTx(function func(querier database.Store) error, txOpts *sql.TxOptions) error { +func (q *querier) InTx(function func(querier database.Store) error, txOpts *database.TxOptions) error { return q.db.InTx(func(tx database.Store) error { // Wrap the transaction store in a querier. 
wrapped := New(tx, q.auth, q.log, q.acs) diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 6a34e88104ce1..439cf1bdaec19 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -2243,13 +2243,13 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("InsertCryptoKey", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertCryptoKeyParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, }). Asserts(rbac.ResourceCryptoKey, policy.ActionCreate) })) s.Run("DeleteCryptoKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.DeleteCryptoKeyParams{ @@ -2259,7 +2259,7 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("GetCryptoKeyByFeatureAndSequence", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.GetCryptoKeyByFeatureAndSequenceParams{ @@ -2269,14 +2269,14 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("GetLatestCryptoKeyByFeature", s.Subtest(func(db database.Store, check *expects) { dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) - check.Args(database.CryptoKeyFeatureWorkspaceApps).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) + check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) s.Run("UpdateCryptoKeyDeletesAt", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), 
db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.UpdateCryptoKeyDeletesAtParams{ @@ -2286,7 +2286,7 @@ func (s *MethodTestSuite) TestCryptoKeys() { }).Asserts(rbac.ResourceCryptoKey, policy.ActionUpdate) })) s.Run("GetCryptoKeysByFeature", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.CryptoKeyFeatureWorkspaceApps). + check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey). Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) } diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 255c62f82aef2..69419b98c79b1 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -943,7 +943,7 @@ func CustomRole(t testing.TB, db database.Store, seed database.CustomRole) datab func CryptoKey(t testing.TB, db database.Store, seed database.CryptoKey) database.CryptoKey { t.Helper() - seed.Feature = takeFirst(seed.Feature, database.CryptoKeyFeatureWorkspaceApps) + seed.Feature = takeFirst(seed.Feature, database.CryptoKeyFeatureWorkspaceAppsAPIKey) // An empty string for the secret is interpreted as // a caller wanting a new secret to be generated. 
@@ -1048,9 +1048,11 @@ func takeFirst[Value comparable](values ...Value) Value { func newCryptoKeySecret(feature database.CryptoKeyFeature) (string, error) { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: return generateCryptoKey(32) - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureWorkspaceAppsToken: + return generateCryptoKey(64) + case database.CryptoKeyFeatureOIDCConvert: return generateCryptoKey(64) case database.CryptoKeyFeatureTailnetResume: return generateCryptoKey(64) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 24498d88c9dbc..4f54598744dd0 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -365,7 +365,7 @@ func (tx *fakeTx) releaseLocks() { } // InTx doesn't rollback data properly for in-memory yet. -func (q *FakeQuerier) InTx(fn func(database.Store) error, _ *sql.TxOptions) error { +func (q *FakeQuerier) InTx(fn func(database.Store) error, opts *database.TxOptions) error { q.mutex.Lock() defer q.mutex.Unlock() tx := &fakeTx{ @@ -374,6 +374,9 @@ func (q *FakeQuerier) InTx(fn func(database.Store) error, _ *sql.TxOptions) erro } defer tx.releaseLocks() + if opts != nil { + database.IncrementExecutionCount(opts) + } return fn(tx) } diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index c3e9de22fb0d8..b0309f9f2e2eb 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -1,2721 +1,122 @@ -// Code generated by coderd/database/gen/metrics. -// Any function can be edited and will not be overwritten. -// New database functions are automatically generated! 
package dbmetrics import ( "context" - "database/sql" + "strconv" "time" - "github.com/google/uuid" "github.com/prometheus/client_golang/prometheus" "golang.org/x/exp/slices" + "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/coderd/rbac/policy" ) -var ( - // Force these imports, for some reason the autogen does not include them. - _ uuid.UUID - _ policy.Action - _ rbac.Objecter -) - -const wrapname = "dbmetrics.metricsStore" - -// New returns a database.Store that registers metrics for all queries to reg. -func New(s database.Store, reg prometheus.Registerer) database.Store { +type metricsStore struct { + database.Store + logger slog.Logger + // txDuration is how long transactions take to execute. + txDuration *prometheus.HistogramVec + // txRetries is how many retries we are seeing for a given tx. + txRetries *prometheus.CounterVec +} + +// NewDBMetrics returns a database.Store that registers metrics for the database +// but does not handle individual queries. +// metricsStore is intended to always be used, because queryMetrics are a bit +// too verbose for many use cases. +func NewDBMetrics(s database.Store, logger slog.Logger, reg prometheus.Registerer) database.Store { // Don't double-wrap. if slices.Contains(s.Wrappers(), wrapname) { return s } - queryLatencies := prometheus.NewHistogramVec(prometheus.HistogramOpts{ + txRetries := prometheus.NewCounterVec(prometheus.CounterOpts{ Namespace: "coderd", Subsystem: "db", - Name: "query_latencies_seconds", - Help: "Latency distribution of queries in seconds.", - Buckets: prometheus.DefBuckets, - }, []string{"query"}) - txDuration := prometheus.NewHistogram(prometheus.HistogramOpts{ + Name: "tx_executions_count", + Help: "Total count of transactions executed. 'retries' is expected to be 0 for a successful transaction.", + }, []string{ + "success", // Did the InTx function return an error? 
+ // Number of executions, since we have retry logic on serialization errors. + // retries = Executions - 1 (as 1 execute is expected) + "retries", + // Uniquely naming some transactions can help debug reoccurring errors. + "tx_id", + }) + reg.MustRegister(txRetries) + + txDuration := prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "coderd", Subsystem: "db", Name: "tx_duration_seconds", Help: "Duration of transactions in seconds.", Buckets: prometheus.DefBuckets, + }, []string{ + "success", // Did the InTx function return an error? + // Uniquely naming some transactions can help debug reoccurring errors. + "tx_id", }) - reg.MustRegister(queryLatencies) reg.MustRegister(txDuration) return &metricsStore{ - s: s, - queryLatencies: queryLatencies, - txDuration: txDuration, + Store: s, + txDuration: txDuration, + txRetries: txRetries, + logger: logger, } } -var _ database.Store = (*metricsStore)(nil) - -type metricsStore struct { - s database.Store - queryLatencies *prometheus.HistogramVec - txDuration prometheus.Histogram -} - func (m metricsStore) Wrappers() []string { - return append(m.s.Wrappers(), wrapname) -} - -func (m metricsStore) Ping(ctx context.Context) (time.Duration, error) { - start := time.Now() - duration, err := m.s.Ping(ctx) - m.queryLatencies.WithLabelValues("Ping").Observe(time.Since(start).Seconds()) - return duration, err -} - -func (m metricsStore) InTx(f func(database.Store) error, options *sql.TxOptions) error { - start := time.Now() - err := m.s.InTx(f, options) - m.txDuration.Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error { - start := time.Now() - err := m.s.AcquireLock(ctx, pgAdvisoryXactLock) - m.queryLatencies.WithLabelValues("AcquireLock").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) AcquireNotificationMessages(ctx context.Context, arg database.AcquireNotificationMessagesParams) 
([]database.AcquireNotificationMessagesRow, error) { - start := time.Now() - r0, r1 := m.s.AcquireNotificationMessages(ctx, arg) - m.queryLatencies.WithLabelValues("AcquireNotificationMessages").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { - start := time.Now() - provisionerJob, err := m.s.AcquireProvisionerJob(ctx, arg) - m.queryLatencies.WithLabelValues("AcquireProvisionerJob").Observe(time.Since(start).Seconds()) - return provisionerJob, err -} - -func (m metricsStore) ActivityBumpWorkspace(ctx context.Context, arg database.ActivityBumpWorkspaceParams) error { - start := time.Now() - r0 := m.s.ActivityBumpWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("ActivityBumpWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) AllUserIDs(ctx context.Context) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.AllUserIDs(ctx) - m.queryLatencies.WithLabelValues("AllUserIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ArchiveUnusedTemplateVersions(ctx context.Context, arg database.ArchiveUnusedTemplateVersionsParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.ArchiveUnusedTemplateVersions(ctx, arg) - m.queryLatencies.WithLabelValues("ArchiveUnusedTemplateVersions").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { - start := time.Now() - r0 := m.s.BatchUpdateWorkspaceLastUsedAt(ctx, arg) - m.queryLatencies.WithLabelValues("BatchUpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) BulkMarkNotificationMessagesFailed(ctx context.Context, arg database.BulkMarkNotificationMessagesFailedParams) (int64, error) { - start := 
time.Now() - r0, r1 := m.s.BulkMarkNotificationMessagesFailed(ctx, arg) - m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesFailed").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) BulkMarkNotificationMessagesSent(ctx context.Context, arg database.BulkMarkNotificationMessagesSentParams) (int64, error) { - start := time.Now() - r0, r1 := m.s.BulkMarkNotificationMessagesSent(ctx, arg) - m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesSent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) CleanTailnetCoordinators(ctx context.Context) error { - start := time.Now() - err := m.s.CleanTailnetCoordinators(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetCoordinators").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) CleanTailnetLostPeers(ctx context.Context) error { - start := time.Now() - r0 := m.s.CleanTailnetLostPeers(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetLostPeers").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) CleanTailnetTunnels(ctx context.Context) error { - start := time.Now() - r0 := m.s.CleanTailnetTunnels(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.CustomRoles(ctx, arg) - m.queryLatencies.WithLabelValues("CustomRoles").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteAPIKeyByID(ctx context.Context, id string) error { - start := time.Now() - err := m.s.DeleteAPIKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteAPIKeyByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := 
m.s.DeleteAPIKeysByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteAllTailnetClientSubscriptions(ctx context.Context, arg database.DeleteAllTailnetClientSubscriptionsParams) error { - start := time.Now() - r0 := m.s.DeleteAllTailnetClientSubscriptions(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteAllTailnetClientSubscriptions").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteAllTailnetTunnels(ctx context.Context, arg database.DeleteAllTailnetTunnelsParams) error { - start := time.Now() - r0 := m.s.DeleteAllTailnetTunnels(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteAllTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := m.s.DeleteApplicationConnectAPIKeysByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteApplicationConnectAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteCoordinator(ctx, id) - m.queryLatencies.WithLabelValues("DeleteCoordinator").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteCryptoKey(ctx context.Context, arg database.DeleteCryptoKeyParams) (database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.DeleteCryptoKey(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteCryptoKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteCustomRole(ctx context.Context, arg database.DeleteCustomRoleParams) error { - start := time.Now() - r0 := m.s.DeleteCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteCustomRole").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) 
DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { - start := time.Now() - r0 := m.s.DeleteExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteExternalAuthLink").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := m.s.DeleteGitSSHKey(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteGitSSHKey").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteGroupByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - err := m.s.DeleteGroupByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteGroupByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteGroupMemberFromGroup(ctx context.Context, arg database.DeleteGroupMemberFromGroupParams) error { - start := time.Now() - err := m.s.DeleteGroupMemberFromGroup(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteGroupMemberFromGroup").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteLicense(ctx context.Context, id int32) (int32, error) { - start := time.Now() - licenseID, err := m.s.DeleteLicense(ctx, id) - m.queryLatencies.WithLabelValues("DeleteLicense").Observe(time.Since(start).Seconds()) - return licenseID, err -} - -func (m metricsStore) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppCodeByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) 
DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodesByAppAndUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppSecretByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppTokensByAppAndUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldNotificationMessages(ctx context.Context) error { - start := time.Now() - r0 := m.s.DeleteOldNotificationMessages(ctx) - m.queryLatencies.WithLabelValues("DeleteOldNotificationMessages").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldProvisionerDaemons(ctx context.Context) error { - start := time.Now() - r0 := m.s.DeleteOldProvisionerDaemons(ctx) - m.queryLatencies.WithLabelValues("DeleteOldProvisionerDaemons").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, arg time.Time) error { - start := time.Now() - r0 := m.s.DeleteOldWorkspaceAgentLogs(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) - return r0 + return append(m.Store.Wrappers(), wrapname) } -func (m metricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) error { - 
start := time.Now() - err := m.s.DeleteOldWorkspaceAgentStats(ctx) - m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOrganization(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error { - start := time.Now() - r0 := m.s.DeleteOrganizationMember(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOrganizationMember").Observe(time.Since(start).Seconds()) - return r0 -} +func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOptions) error { + if options == nil { + options = database.DefaultTXOptions() + } -func (m metricsStore) DeleteProvisionerKey(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteProvisionerKey(ctx, id) - m.queryLatencies.WithLabelValues("DeleteProvisionerKey").Observe(time.Since(start).Seconds()) - return r0 -} + if options.TxIdentifier == "" { + // empty strings are hard to deal with in grafana + options.TxIdentifier = "unlabeled" + } -func (m metricsStore) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { start := time.Now() - err := m.s.DeleteReplicasUpdatedBefore(ctx, updatedAt) - m.queryLatencies.WithLabelValues("DeleteReplicasUpdatedBefore").Observe(time.Since(start).Seconds()) + err := m.Store.InTx(f, options) + dur := time.Since(start) + // The number of unique label combinations is + // 2 x #IDs x #of buckets + // So IDs should be used sparingly to prevent too much bloat. 
+ m.txDuration.With(prometheus.Labels{ + "success": strconv.FormatBool(err == nil), + "tx_id": options.TxIdentifier, + }).Observe(dur.Seconds()) + + m.txRetries.With(prometheus.Labels{ + "success": strconv.FormatBool(err == nil), + "retries": strconv.FormatInt(int64(options.ExecutionCount()-1), 10), + "tx_id": options.TxIdentifier, + }).Inc() + + // Log all serializable transactions that are retried. + // This is expected to happen in production, but should be kept + // to a minimum. If these logs happen frequently, something is wrong. + if options.ExecutionCount() > 1 { + l := m.logger.Warn + if err != nil { + // Error level if retries were not enough + l = m.logger.Error + } + // No context is present in this function :( + l(context.Background(), "database transaction hit serialization error and had to retry", + slog.F("success", err == nil), // It can succeed on retry + // Note the error might not be a serialization error. It is possible + // the first error was a serialization error, and the error on the + // retry is different. If this is the case, we still want to log it + // since the first error was a serialization error. + slog.Error(err), // Might be nil, that is ok! 
+ slog.F("executions", options.ExecutionCount()), + slog.F("tx_id", options.TxIdentifier), + slog.F("duration", dur), + ) + } return err } - -func (m metricsStore) DeleteRuntimeConfig(ctx context.Context, key string) error { - start := time.Now() - r0 := m.s.DeleteRuntimeConfig(ctx, key) - m.queryLatencies.WithLabelValues("DeleteRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteTailnetAgent(ctx context.Context, arg database.DeleteTailnetAgentParams) (database.DeleteTailnetAgentRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetAgent(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetClient(ctx context.Context, arg database.DeleteTailnetClientParams) (database.DeleteTailnetClientRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetClient(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetClient").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetClientSubscription(ctx context.Context, arg database.DeleteTailnetClientSubscriptionParams) error { - start := time.Now() - r0 := m.s.DeleteTailnetClientSubscription(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetClientSubscription").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteTailnetPeer(ctx context.Context, arg database.DeleteTailnetPeerParams) (database.DeleteTailnetPeerRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetPeer(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetPeer").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTailnetTunnelParams) (database.DeleteTailnetTunnelRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetTunnel(ctx, arg) - 
m.queryLatencies.WithLabelValues("DeleteTailnetTunnel").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteWorkspaceAgentPortShare(ctx context.Context, arg database.DeleteWorkspaceAgentPortShareParams) error { - start := time.Now() - r0 := m.s.DeleteWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, templateID uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteWorkspaceAgentPortSharesByTemplate(ctx, templateID) - m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortSharesByTemplate").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error { - start := time.Now() - r0 := m.s.EnqueueNotificationMessage(ctx, arg) - m.queryLatencies.WithLabelValues("EnqueueNotificationMessage").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) FavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { - start := time.Now() - r0 := m.s.FavoriteWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("FavoriteWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) { - start := time.Now() - r0, r1 := m.s.FetchNewMessageMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("FetchNewMessageMetadata").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { - start := time.Now() - apiKey, err := m.s.GetAPIKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetAPIKeyByID").Observe(time.Since(start).Seconds()) - return apiKey, err -} - -func (m 
metricsStore) GetAPIKeyByName(ctx context.Context, arg database.GetAPIKeyByNameParams) (database.APIKey, error) { - start := time.Now() - apiKey, err := m.s.GetAPIKeyByName(ctx, arg) - m.queryLatencies.WithLabelValues("GetAPIKeyByName").Observe(time.Since(start).Seconds()) - return apiKey, err -} - -func (m metricsStore) GetAPIKeysByLoginType(ctx context.Context, loginType database.LoginType) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysByLoginType(ctx, loginType) - m.queryLatencies.WithLabelValues("GetAPIKeysByLoginType").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetAPIKeysByUserID(ctx context.Context, arg database.GetAPIKeysByUserIDParams) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysByUserID(ctx, arg) - m.queryLatencies.WithLabelValues("GetAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysLastUsedAfter(ctx, lastUsed) - m.queryLatencies.WithLabelValues("GetAPIKeysLastUsedAfter").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetActiveUserCount(ctx context.Context) (int64, error) { - start := time.Now() - count, err := m.s.GetActiveUserCount(ctx) - m.queryLatencies.WithLabelValues("GetActiveUserCount").Observe(time.Since(start).Seconds()) - return count, err -} - -func (m metricsStore) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, templateID uuid.UUID) ([]database.WorkspaceBuild, error) { - start := time.Now() - r0, r1 := m.s.GetActiveWorkspaceBuildsByTemplateID(ctx, templateID) - m.queryLatencies.WithLabelValues("GetActiveWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetAgents(ctx context.Context) 
([]database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetAgents(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetAgents").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetCoordinators(ctx context.Context) ([]database.TailnetCoordinator, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetCoordinators(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetCoordinators").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetPeers(ctx context.Context) ([]database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetPeers(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetPeers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetTunnel, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetTunnels(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAnnouncementBanners(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetAnnouncementBanners(ctx) - m.queryLatencies.WithLabelValues("GetAnnouncementBanners").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAppSecurityKey(ctx context.Context) (string, error) { - start := time.Now() - key, err := m.s.GetAppSecurityKey(ctx) - m.queryLatencies.WithLabelValues("GetAppSecurityKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) GetApplicationName(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetApplicationName(ctx) - m.queryLatencies.WithLabelValues("GetApplicationName").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams) ([]database.GetAuditLogsOffsetRow, 
error) { - start := time.Now() - rows, err := m.s.GetAuditLogsOffset(ctx, arg) - m.queryLatencies.WithLabelValues("GetAuditLogsOffset").Observe(time.Since(start).Seconds()) - return rows, err -} - -func (m metricsStore) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (database.GetAuthorizationUserRolesRow, error) { - start := time.Now() - row, err := m.s.GetAuthorizationUserRoles(ctx, userID) - m.queryLatencies.WithLabelValues("GetAuthorizationUserRoles").Observe(time.Since(start).Seconds()) - return row, err -} - -func (m metricsStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetCoordinatorResumeTokenSigningKey(ctx) - m.queryLatencies.WithLabelValues("GetCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeyByFeatureAndSequence(ctx context.Context, arg database.GetCryptoKeyByFeatureAndSequenceParams) (database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeyByFeatureAndSequence(ctx, arg) - m.queryLatencies.WithLabelValues("GetCryptoKeyByFeatureAndSequence").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeys(ctx context.Context) ([]database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeys(ctx) - m.queryLatencies.WithLabelValues("GetCryptoKeys").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeysByFeature(ctx context.Context, feature database.CryptoKeyFeature) ([]database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeysByFeature(ctx, feature) - m.queryLatencies.WithLabelValues("GetCryptoKeysByFeature").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetDBCryptKeys(ctx context.Context) ([]database.DBCryptKey, error) { - start := time.Now() - r0, r1 := m.s.GetDBCryptKeys(ctx) - 
m.queryLatencies.WithLabelValues("GetDBCryptKeys").Observe(time.Since(start).Seconds())
	return r0, r1
}

// Each method below delegates the query to the wrapped database.Store and
// records its wall-clock duration in the queryLatencies histogram, labeled
// with the query name, before returning the store's result unchanged.

func (m metricsStore) GetDERPMeshKey(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetDERPMeshKey(ctx)
	m.queryLatencies.WithLabelValues("GetDERPMeshKey").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDefaultOrganization(ctx context.Context) (database.Organization, error) {
	begin := time.Now()
	res, err := m.s.GetDefaultOrganization(ctx)
	m.queryLatencies.WithLabelValues("GetDefaultOrganization").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDefaultProxyConfig(ctx context.Context) (database.GetDefaultProxyConfigRow, error) {
	begin := time.Now()
	res, err := m.s.GetDefaultProxyConfig(ctx)
	m.queryLatencies.WithLabelValues("GetDefaultProxyConfig").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]database.GetDeploymentDAUsRow, error) {
	begin := time.Now()
	res, err := m.s.GetDeploymentDAUs(ctx, tzOffset)
	m.queryLatencies.WithLabelValues("GetDeploymentDAUs").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDeploymentID(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetDeploymentID(ctx)
	m.queryLatencies.WithLabelValues("GetDeploymentID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentStatsRow, error) {
	begin := time.Now()
	res, err := m.s.GetDeploymentWorkspaceAgentStats(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentStats").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDeploymentWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentUsageStatsRow, error) {
	begin := time.Now()
	res, err := m.s.GetDeploymentWorkspaceAgentUsageStats(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentUsageStats").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetDeploymentWorkspaceStats(ctx context.Context) (database.GetDeploymentWorkspaceStatsRow, error) {
	begin := time.Now()
	res, err := m.s.GetDeploymentWorkspaceStats(ctx)
	m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceStats").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
	begin := time.Now()
	res, err := m.s.GetExternalAuthLink(ctx, arg)
	m.queryLatencies.WithLabelValues("GetExternalAuthLink").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) {
	begin := time.Now()
	res, err := m.s.GetExternalAuthLinksByUserID(ctx, userID)
	m.queryLatencies.WithLabelValues("GetExternalAuthLinksByUserID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg database.GetFailedWorkspaceBuildsByTemplateIDParams) ([]database.GetFailedWorkspaceBuildsByTemplateIDRow, error) {
	begin := time.Now()
	res, err := m.s.GetFailedWorkspaceBuildsByTemplateID(ctx, arg)
	m.queryLatencies.WithLabelValues("GetFailedWorkspaceBuildsByTemplateID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetFileByHashAndCreator(ctx context.Context, arg database.GetFileByHashAndCreatorParams) (database.File, error) {
	begin := time.Now()
	res, err := m.s.GetFileByHashAndCreator(ctx, arg)
	m.queryLatencies.WithLabelValues("GetFileByHashAndCreator").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, error) {
	begin := time.Now()
	res, err := m.s.GetFileByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetFileByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]database.GetFileTemplatesRow, error) {
	begin := time.Now()
	res, err := m.s.GetFileTemplates(ctx, fileID)
	m.queryLatencies.WithLabelValues("GetFileTemplates").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
	begin := time.Now()
	res, err := m.s.GetGitSSHKey(ctx, userID)
	m.queryLatencies.WithLabelValues("GetGitSSHKey").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroupByID(ctx context.Context, id uuid.UUID) (database.Group, error) {
	begin := time.Now()
	res, err := m.s.GetGroupByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetGroupByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroupByOrgAndName(ctx context.Context, arg database.GetGroupByOrgAndNameParams) (database.Group, error) {
	begin := time.Now()
	res, err := m.s.GetGroupByOrgAndName(ctx, arg)
	m.queryLatencies.WithLabelValues("GetGroupByOrgAndName").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroupMembers(ctx context.Context) ([]database.GroupMember, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembers(ctx)
	m.queryLatencies.WithLabelValues("GetGroupMembers").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroupMembersByGroupID(ctx context.Context, groupID uuid.UUID) ([]database.GroupMember, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembersByGroupID(ctx, groupID)
	m.queryLatencies.WithLabelValues("GetGroupMembersByGroupID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroupMembersCountByGroupID(ctx context.Context, groupID uuid.UUID) (int64, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembersCountByGroupID(ctx, groupID)
	m.queryLatencies.WithLabelValues("GetGroupMembersCountByGroupID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetGroups(ctx context.Context, arg database.GetGroupsParams) ([]database.GetGroupsRow, error) {
	begin := time.Now()
	res, err := m.s.GetGroups(ctx, arg)
	m.queryLatencies.WithLabelValues("GetGroups").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetHealthSettings(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetHealthSettings(ctx)
	m.queryLatencies.WithLabelValues("GetHealthSettings").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) {
	begin := time.Now()
	res, err := m.s.GetHungProvisionerJobs(ctx, hungSince)
	m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
	begin := time.Now()
	res, err := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg)
	m.queryLatencies.WithLabelValues("GetJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLastUpdateCheck(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetLastUpdateCheck(ctx)
	m.queryLatencies.WithLabelValues("GetLastUpdateCheck").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLatestCryptoKeyByFeature(ctx context.Context, feature database.CryptoKeyFeature) (database.CryptoKey, error) {
	begin := time.Now()
	res, err := m.s.GetLatestCryptoKeyByFeature(ctx, feature)
	m.queryLatencies.WithLabelValues("GetLatestCryptoKeyByFeature").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildByWorkspaceID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuilds(ctx)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuilds").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuildsByWorkspaceIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildsByWorkspaceIDs").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLicenseByID(ctx context.Context, id int32) (database.License, error) {
	begin := time.Now()
	res, err := m.s.GetLicenseByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetLicenseByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLicenses(ctx context.Context) ([]database.License, error) {
	begin := time.Now()
	res, err := m.s.GetLicenses(ctx)
	m.queryLatencies.WithLabelValues("GetLicenses").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetLogoURL(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetLogoURL(ctx)
	m.queryLatencies.WithLabelValues("GetLogoURL").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationMessagesByStatus(ctx, arg)
	m.queryLatencies.WithLabelValues("GetNotificationMessagesByStatus").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetNotificationReportGeneratorLogByTemplate(ctx context.Context, arg uuid.UUID) (database.NotificationReportGeneratorLog, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationReportGeneratorLogByTemplate(ctx, arg)
	m.queryLatencies.WithLabelValues("GetNotificationReportGeneratorLogByTemplate").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetNotificationTemplateByID(ctx context.Context, id uuid.UUID) (database.NotificationTemplate, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationTemplateByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetNotificationTemplateByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetNotificationTemplatesByKind(ctx context.Context, kind database.NotificationTemplateKind) ([]database.NotificationTemplate, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationTemplatesByKind(ctx, kind)
	m.queryLatencies.WithLabelValues("GetNotificationTemplatesByKind").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetNotificationsSettings(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationsSettings(ctx)
	m.queryLatencies.WithLabelValues("GetNotificationsSettings").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppCode, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppCodeByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppCode, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppCodeByPrefix(ctx, secretPrefix)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByPrefix").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretByPrefix(ctx, secretPrefix)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByPrefix").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m metricsStore) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretsByAppID(ctx, appID)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretsByAppID").Observe(time.Since(begin).Seconds())
	return res, err
}
-func (m metricsStore) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPrefix []byte) (database.OAuth2ProviderAppToken, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppTokenByPrefix(ctx, hashPrefix) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppTokenByPrefix").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderApps(ctx) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderApps").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppsByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppsByUserID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuthSigningKey(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetOAuthSigningKey(ctx) - m.queryLatencies.WithLabelValues("GetOAuthSigningKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOrganizationByID(ctx context.Context, id uuid.UUID) (database.Organization, error) { - start := time.Now() - organization, err := m.s.GetOrganizationByID(ctx, id) - m.queryLatencies.WithLabelValues("GetOrganizationByID").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) { - start := time.Now() - organization, err := m.s.GetOrganizationByName(ctx, name) - m.queryLatencies.WithLabelValues("GetOrganizationByName").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) 
([]database.GetOrganizationIDsByMemberIDsRow, error) { - start := time.Now() - organizations, err := m.s.GetOrganizationIDsByMemberIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetOrganizationIDsByMemberIDs").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetOrganizations(ctx context.Context, args database.GetOrganizationsParams) ([]database.Organization, error) { - start := time.Now() - organizations, err := m.s.GetOrganizations(ctx, args) - m.queryLatencies.WithLabelValues("GetOrganizations").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) { - start := time.Now() - organizations, err := m.s.GetOrganizationsByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetOrganizationsByUserID").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { - start := time.Now() - schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetParameterSchemasByJobID").Observe(time.Since(start).Seconds()) - return schemas, err -} - -func (m metricsStore) GetPreviousTemplateVersion(ctx context.Context, arg database.GetPreviousTemplateVersionParams) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetPreviousTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("GetPreviousTemplateVersion").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetProvisionerDaemons(ctx context.Context) ([]database.ProvisionerDaemon, error) { - start := time.Now() - daemons, err := m.s.GetProvisionerDaemons(ctx) - m.queryLatencies.WithLabelValues("GetProvisionerDaemons").Observe(time.Since(start).Seconds()) - return daemons, err -} - -func (m metricsStore) 
GetProvisionerDaemonsByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerDaemon, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerDaemonsByOrganization(ctx, organizationID) - m.queryLatencies.WithLabelValues("GetProvisionerDaemonsByOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { - start := time.Now() - job, err := m.s.GetProvisionerJobByID(ctx, id) - m.queryLatencies.WithLabelValues("GetProvisionerJobByID").Observe(time.Since(start).Seconds()) - return job, err -} - -func (m metricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetProvisionerJobTimingsByJobID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := m.s.GetProvisionerJobsByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDs").Observe(time.Since(start).Seconds()) - return jobs, err -} - -func (m metricsStore) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids) - m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDsWithQueuePosition").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := m.s.GetProvisionerJobsCreatedAfter(ctx, createdAt) - 
m.queryLatencies.WithLabelValues("GetProvisionerJobsCreatedAfter").Observe(time.Since(start).Seconds()) - return jobs, err -} - -func (m metricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByHashedSecret").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerKeyByName(ctx context.Context, name database.GetProvisionerKeyByNameParams) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByName(ctx, name) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByName").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerLogsAfterID(ctx context.Context, arg database.GetProvisionerLogsAfterIDParams) ([]database.ProvisionerJobLog, error) { - start := time.Now() - logs, err := m.s.GetProvisionerLogsAfterID(ctx, arg) - m.queryLatencies.WithLabelValues("GetProvisionerLogsAfterID").Observe(time.Since(start).Seconds()) - return logs, err -} - -func (m metricsStore) GetQuotaAllowanceForUser(ctx context.Context, userID database.GetQuotaAllowanceForUserParams) (int64, error) { - start := time.Now() - allowance, err := m.s.GetQuotaAllowanceForUser(ctx, userID) - m.queryLatencies.WithLabelValues("GetQuotaAllowanceForUser").Observe(time.Since(start).Seconds()) - return allowance, err -} - -func (m metricsStore) GetQuotaConsumedForUser(ctx context.Context, ownerID database.GetQuotaConsumedForUserParams) (int64, error) { - start := time.Now() - consumed, err := 
m.s.GetQuotaConsumedForUser(ctx, ownerID) - m.queryLatencies.WithLabelValues("GetQuotaConsumedForUser").Observe(time.Since(start).Seconds()) - return consumed, err -} - -func (m metricsStore) GetReplicaByID(ctx context.Context, id uuid.UUID) (database.Replica, error) { - start := time.Now() - replica, err := m.s.GetReplicaByID(ctx, id) - m.queryLatencies.WithLabelValues("GetReplicaByID").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.Replica, error) { - start := time.Now() - replicas, err := m.s.GetReplicasUpdatedAfter(ctx, updatedAt) - m.queryLatencies.WithLabelValues("GetReplicasUpdatedAfter").Observe(time.Since(start).Seconds()) - return replicas, err -} - -func (m metricsStore) GetRuntimeConfig(ctx context.Context, key string) (string, error) { - start := time.Now() - r0, r1 := m.s.GetRuntimeConfig(ctx, key) - m.queryLatencies.WithLabelValues("GetRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetAgents(ctx, id) - m.queryLatencies.WithLabelValues("GetTailnetAgents").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]database.TailnetClient, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetClientsForAgent(ctx, agentID) - m.queryLatencies.WithLabelValues("GetTailnetClientsForAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetPeers(ctx, id) - m.queryLatencies.WithLabelValues("GetTailnetPeers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) 
GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerBindingsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetTunnelPeerBindings(ctx, srcID) - m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerBindings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerIDsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetTunnelPeerIDs(ctx, srcID) - m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateAppInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAppInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) ([]database.GetTemplateAppInsightsByTemplateRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateAppInsightsByTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAppInsightsByTemplate").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAverageBuildTime(ctx context.Context, arg database.GetTemplateAverageBuildTimeParams) (database.GetTemplateAverageBuildTimeRow, error) { - start := time.Now() - buildTime, err := m.s.GetTemplateAverageBuildTime(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAverageBuildTime").Observe(time.Since(start).Seconds()) - return buildTime, err -} - -func (m metricsStore) GetTemplateByID(ctx context.Context, id uuid.UUID) (database.Template, error) { - start := time.Now() - template, err := m.s.GetTemplateByID(ctx, id) - 
m.queryLatencies.WithLabelValues("GetTemplateByID").Observe(time.Since(start).Seconds()) - return template, err -} - -func (m metricsStore) GetTemplateByOrganizationAndName(ctx context.Context, arg database.GetTemplateByOrganizationAndNameParams) (database.Template, error) { - start := time.Now() - template, err := m.s.GetTemplateByOrganizationAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateByOrganizationAndName").Observe(time.Since(start).Seconds()) - return template, err -} - -func (m metricsStore) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateDAUsParams) ([]database.GetTemplateDAUsRow, error) { - start := time.Now() - daus, err := m.s.GetTemplateDAUs(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateDAUs").Observe(time.Since(start).Seconds()) - return daus, err -} - -func (m metricsStore) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsightsByInterval(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsightsByInterval").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsightsByTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsightsByTemplate").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateParameterInsights(ctx context.Context, arg 
database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateParameterInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateParameterInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateUsageStats(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateUsageStats").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByID(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateVersionByID").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetTemplateVersionByJobID").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg database.GetTemplateVersionByTemplateIDAndNameParams) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByTemplateIDAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateVersionByTemplateIDAndName").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { - start := time.Now() - parameters, err := m.s.GetTemplateVersionParameters(ctx, templateVersionID) - 
m.queryLatencies.WithLabelValues("GetTemplateVersionParameters").Observe(time.Since(start).Seconds()) - return parameters, err -} - -func (m metricsStore) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionVariable, error) { - start := time.Now() - variables, err := m.s.GetTemplateVersionVariables(ctx, templateVersionID) - m.queryLatencies.WithLabelValues("GetTemplateVersionVariables").Observe(time.Since(start).Seconds()) - return variables, err -} - -func (m metricsStore) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) - m.queryLatencies.WithLabelValues("GetTemplateVersionWorkspaceTags").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetTemplateVersionsByIDs").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) GetTemplateVersionsByTemplateID(ctx context.Context, arg database.GetTemplateVersionsByTemplateIDParams) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsByTemplateID(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateVersionsByTemplateID").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetTemplateVersionsCreatedAfter").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) 
GetTemplates(ctx context.Context) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetTemplates(ctx) - m.queryLatencies.WithLabelValues("GetTemplates").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetTemplatesWithFilter(ctx context.Context, arg database.GetTemplatesWithFilterParams) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetTemplatesWithFilter(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplatesWithFilter").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { - start := time.Now() - licenses, err := m.s.GetUnexpiredLicenses(ctx) - m.queryLatencies.WithLabelValues("GetUnexpiredLicenses").Observe(time.Since(start).Seconds()) - return licenses, err -} - -func (m metricsStore) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetUserActivityInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserActivityInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserByEmailOrUsername(ctx context.Context, arg database.GetUserByEmailOrUsernameParams) (database.User, error) { - start := time.Now() - user, err := m.s.GetUserByEmailOrUsername(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserByEmailOrUsername").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) GetUserByID(ctx context.Context, id uuid.UUID) (database.User, error) { - start := time.Now() - user, err := m.s.GetUserByID(ctx, id) - m.queryLatencies.WithLabelValues("GetUserByID").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) GetUserCount(ctx context.Context) (int64, error) { - start := time.Now() - count, err := m.s.GetUserCount(ctx) - 
m.queryLatencies.WithLabelValues("GetUserCount").Observe(time.Since(start).Seconds()) - return count, err -} - -func (m metricsStore) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetUserLatencyInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserLatencyInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (database.UserLink, error) { - start := time.Now() - link, err := m.s.GetUserLinkByLinkedID(ctx, linkedID) - m.queryLatencies.WithLabelValues("GetUserLinkByLinkedID").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) GetUserLinkByUserIDLoginType(ctx context.Context, arg database.GetUserLinkByUserIDLoginTypeParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.GetUserLinkByUserIDLoginType(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserLinkByUserIDLoginType").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.UserLink, error) { - start := time.Now() - r0, r1 := m.s.GetUserLinksByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetUserLinksByUserID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]database.NotificationPreference, error) { - start := time.Now() - r0, r1 := m.s.GetUserNotificationPreferences(ctx, userID) - m.queryLatencies.WithLabelValues("GetUserNotificationPreferences").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserWorkspaceBuildParameters(ctx context.Context, ownerID database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { - start := time.Now() - r0, r1 
:= m.s.GetUserWorkspaceBuildParameters(ctx, ownerID) - m.queryLatencies.WithLabelValues("GetUserWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUsers(ctx context.Context, arg database.GetUsersParams) ([]database.GetUsersRow, error) { - start := time.Now() - users, err := m.s.GetUsers(ctx, arg) - m.queryLatencies.WithLabelValues("GetUsers").Observe(time.Since(start).Seconds()) - return users, err -} - -func (m metricsStore) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]database.User, error) { - start := time.Now() - users, err := m.s.GetUsersByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetUsersByIDs").Observe(time.Since(start).Seconds()) - return users, err -} - -func (m metricsStore) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, authToken) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentAndLatestBuildByAuthToken").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.GetWorkspaceAgentByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentByID").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) GetWorkspaceAgentByInstanceID(ctx context.Context, authInstanceID string) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.GetWorkspaceAgentByInstanceID(ctx, authInstanceID) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentByInstanceID").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) GetWorkspaceAgentLifecycleStateByID(ctx context.Context, id uuid.UUID) (database.GetWorkspaceAgentLifecycleStateByIDRow, error) { - start := time.Now() 
- r0, r1 := m.s.GetWorkspaceAgentLifecycleStateByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentLogSourcesByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentLogSource, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentLogSourcesByAgentIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogSourcesByAgentIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentLogsAfter(ctx context.Context, arg database.GetWorkspaceAgentLogsAfterParams) ([]database.WorkspaceAgentLog, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentLogsAfter(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogsAfter").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentMetadata(ctx context.Context, workspaceAgentID database.GetWorkspaceAgentMetadataParams) ([]database.WorkspaceAgentMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceAgentMetadata(ctx, workspaceAgentID) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceAgentPortShare(ctx context.Context, arg database.GetWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentScriptTimingsByBuildID(ctx context.Context, id uuid.UUID) ([]database.GetWorkspaceAgentScriptTimingsByBuildIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentScriptTimingsByBuildID(ctx, id) - 
m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptTimingsByBuildID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentScriptsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentScript, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentScriptsByAgentIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptsByAgentIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsRow, error) { - start := time.Now() - stats, err := m.s.GetWorkspaceAgentStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return stats, err -} - -func (m metricsStore) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsAndLabelsRow, error) { - start := time.Now() - stats, err := m.s.GetWorkspaceAgentStatsAndLabels(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentStatsAndLabels").Observe(time.Since(start).Seconds()) - return stats, err -} - -func (m metricsStore) GetWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentUsageStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentUsageStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsAndLabelsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentUsageStatsAndLabels(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStatsAndLabels").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) 
GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsByResourceIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentsByResourceIDs").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentsCreatedAfter").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspaceID) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentsInLatestBuildByWorkspaceID").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg database.GetWorkspaceAppByAgentIDAndSlugParams) (database.WorkspaceApp, error) { - start := time.Now() - app, err := m.s.GetWorkspaceAppByAgentIDAndSlug(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAppByAgentIDAndSlug").Observe(time.Since(start).Seconds()) - return app, err -} - -func (m metricsStore) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsByAgentID(ctx, agentID) - m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentID").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsByAgentIDs(ctx, ids) - 
m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentIDs").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAppsCreatedAfter").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByID").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UUID) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByJobID").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Context, arg database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByWorkspaceIDAndBuildNumber").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildParameters(ctx context.Context, workspaceBuildID uuid.UUID) ([]database.WorkspaceBuildParameter, error) { - start := time.Now() - params, err := m.s.GetWorkspaceBuildParameters(ctx, workspaceBuildID) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return params, err -} - -func (m metricsStore) GetWorkspaceBuildStatsByTemplates(ctx context.Context, since time.Time) 
([]database.GetWorkspaceBuildStatsByTemplatesRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceBuildStatsByTemplates(ctx, since) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildStatsByTemplates").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg database.GetWorkspaceBuildsByWorkspaceIDParams) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetWorkspaceBuildsByWorkspaceID(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildsByWorkspaceID").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetWorkspaceBuildsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildsCreatedAfter").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByAgentID(ctx, agentID) - m.queryLatencies.WithLabelValues("GetWorkspaceByAgentID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceByID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database.GetWorkspaceByOwnerIDAndNameParams) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByOwnerIDAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceByOwnerIDAndName").Observe(time.Since(start).Seconds()) - return workspace, err -} 
- -func (m metricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID) - m.queryLatencies.WithLabelValues("GetWorkspaceByWorkspaceAppID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceProxies(ctx context.Context) ([]database.WorkspaceProxy, error) { - start := time.Now() - proxies, err := m.s.GetWorkspaceProxies(ctx) - m.queryLatencies.WithLabelValues("GetWorkspaceProxies").Observe(time.Since(start).Seconds()) - return proxies, err -} - -func (m metricsStore) GetWorkspaceProxyByHostname(ctx context.Context, arg database.GetWorkspaceProxyByHostnameParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByHostname(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByHostname").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceProxyByID(ctx context.Context, id uuid.UUID) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByID").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceProxyByName(ctx context.Context, name string) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByName(ctx, name) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByName").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (database.WorkspaceResource, error) { - start := time.Now() - resource, err := m.s.GetWorkspaceResourceByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceByID").Observe(time.Since(start).Seconds()) - return resource, err -} - -func (m metricsStore) 
GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceResourceMetadataByResourceIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataByResourceIDs").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceResourceMetadataCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataCreatedAfter").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobID").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesByJobIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobIDs").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesCreatedAfter").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx context.Context, templateIds []uuid.UUID) 
([]database.GetWorkspaceUniqueOwnerCountByTemplateIDsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, templateIds) - m.queryLatencies.WithLabelValues("GetWorkspaceUniqueOwnerCountByTemplateIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaces(ctx context.Context, arg database.GetWorkspacesParams) ([]database.GetWorkspacesRow, error) { - start := time.Now() - workspaces, err := m.s.GetWorkspaces(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaces").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]database.WorkspaceTable, error) { - start := time.Now() - workspaces, err := m.s.GetWorkspacesEligibleForTransition(ctx, now) - m.queryLatencies.WithLabelValues("GetWorkspacesEligibleForAutoStartStop").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { - start := time.Now() - key, err := m.s.InsertAPIKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertAPIKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (database.Group, error) { - start := time.Now() - group, err := m.s.InsertAllUsersGroup(ctx, organizationID) - m.queryLatencies.WithLabelValues("InsertAllUsersGroup").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) InsertAuditLog(ctx context.Context, arg database.InsertAuditLogParams) (database.AuditLog, error) { - start := time.Now() - log, err := m.s.InsertAuditLog(ctx, arg) - m.queryLatencies.WithLabelValues("InsertAuditLog").Observe(time.Since(start).Seconds()) - return log, err -} - -func (m metricsStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) 
(database.CryptoKey, error) { - start := time.Now() - key, err := m.s.InsertCryptoKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertCryptoKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertCustomRole(ctx context.Context, arg database.InsertCustomRoleParams) (database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.InsertCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("InsertCustomRole").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertDBCryptKey(ctx context.Context, arg database.InsertDBCryptKeyParams) error { - start := time.Now() - r0 := m.s.InsertDBCryptKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertDBCryptKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertDERPMeshKey(ctx context.Context, value string) error { - start := time.Now() - err := m.s.InsertDERPMeshKey(ctx, value) - m.queryLatencies.WithLabelValues("InsertDERPMeshKey").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertDeploymentID(ctx context.Context, value string) error { - start := time.Now() - err := m.s.InsertDeploymentID(ctx, value) - m.queryLatencies.WithLabelValues("InsertDeploymentID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertExternalAuthLink(ctx context.Context, arg database.InsertExternalAuthLinkParams) (database.ExternalAuthLink, error) { - start := time.Now() - link, err := m.s.InsertExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("InsertExternalAuthLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) InsertFile(ctx context.Context, arg database.InsertFileParams) (database.File, error) { - start := time.Now() - file, err := m.s.InsertFile(ctx, arg) - m.queryLatencies.WithLabelValues("InsertFile").Observe(time.Since(start).Seconds()) - return file, err -} - -func (m metricsStore) InsertGitSSHKey(ctx 
context.Context, arg database.InsertGitSSHKeyParams) (database.GitSSHKey, error) { - start := time.Now() - key, err := m.s.InsertGitSSHKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGitSSHKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertGroup(ctx context.Context, arg database.InsertGroupParams) (database.Group, error) { - start := time.Now() - group, err := m.s.InsertGroup(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGroup").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) InsertGroupMember(ctx context.Context, arg database.InsertGroupMemberParams) error { - start := time.Now() - err := m.s.InsertGroupMember(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGroupMember").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) { - start := time.Now() - license, err := m.s.InsertLicense(ctx, arg) - m.queryLatencies.WithLabelValues("InsertLicense").Observe(time.Since(start).Seconds()) - return license, err -} - -func (m metricsStore) InsertMissingGroups(ctx context.Context, arg database.InsertMissingGroupsParams) ([]database.Group, error) { - start := time.Now() - r0, r1 := m.s.InsertMissingGroups(ctx, arg) - m.queryLatencies.WithLabelValues("InsertMissingGroups").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderApp(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderApp").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) { - start := time.Now() - r0, r1 := 
m.s.InsertOAuth2ProviderAppCode(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppCode").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderAppSecret(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppSecret").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppToken(ctx context.Context, arg database.InsertOAuth2ProviderAppTokenParams) (database.OAuth2ProviderAppToken, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderAppToken(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppToken").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOrganization(ctx context.Context, arg database.InsertOrganizationParams) (database.Organization, error) { - start := time.Now() - organization, err := m.s.InsertOrganization(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOrganization").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) InsertOrganizationMember(ctx context.Context, arg database.InsertOrganizationMemberParams) (database.OrganizationMember, error) { - start := time.Now() - member, err := m.s.InsertOrganizationMember(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOrganizationMember").Observe(time.Since(start).Seconds()) - return member, err -} - -func (m metricsStore) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { - start := time.Now() - job, err := m.s.InsertProvisionerJob(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJob").Observe(time.Since(start).Seconds()) - return job, err -} - -func (m metricsStore) InsertProvisionerJobLogs(ctx 
context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) { - start := time.Now() - logs, err := m.s.InsertProvisionerJobLogs(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJobLogs").Observe(time.Since(start).Seconds()) - return logs, err -} - -func (m metricsStore) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { - start := time.Now() - r0, r1 := m.s.InsertProvisionerJobTimings(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJobTimings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertProvisionerKey(ctx context.Context, arg database.InsertProvisionerKeyParams) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.InsertProvisionerKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) { - start := time.Now() - replica, err := m.s.InsertReplica(ctx, arg) - m.queryLatencies.WithLabelValues("InsertReplica").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { - start := time.Now() - err := m.s.InsertTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplate").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertTemplateVersion(ctx context.Context, arg database.InsertTemplateVersionParams) error { - start := time.Now() - err := m.s.InsertTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersion").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertTemplateVersionParameter(ctx context.Context, arg database.InsertTemplateVersionParameterParams) 
(database.TemplateVersionParameter, error) { - start := time.Now() - parameter, err := m.s.InsertTemplateVersionParameter(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionParameter").Observe(time.Since(start).Seconds()) - return parameter, err -} - -func (m metricsStore) InsertTemplateVersionVariable(ctx context.Context, arg database.InsertTemplateVersionVariableParams) (database.TemplateVersionVariable, error) { - start := time.Now() - variable, err := m.s.InsertTemplateVersionVariable(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionVariable").Observe(time.Since(start).Seconds()) - return variable, err -} - -func (m metricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { - start := time.Now() - r0, r1 := m.s.InsertTemplateVersionWorkspaceTag(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionWorkspaceTag").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { - start := time.Now() - user, err := m.s.InsertUser(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUser").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) InsertUserGroupsByID(ctx context.Context, arg database.InsertUserGroupsByIDParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.InsertUserGroupsByID(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserGroupsByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertUserGroupsByName(ctx context.Context, arg database.InsertUserGroupsByNameParams) error { - start := time.Now() - err := m.s.InsertUserGroupsByName(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserGroupsByName").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertUserLink(ctx 
context.Context, arg database.InsertUserLinkParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.InsertUserLink(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) InsertWorkspace(ctx context.Context, arg database.InsertWorkspaceParams) (database.WorkspaceTable, error) { - start := time.Now() - workspace, err := m.s.InsertWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspace").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.InsertWorkspaceAgent(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgent").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) InsertWorkspaceAgentLogSources(ctx context.Context, arg database.InsertWorkspaceAgentLogSourcesParams) ([]database.WorkspaceAgentLogSource, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentLogSources(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogSources").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentLogs(ctx context.Context, arg database.InsertWorkspaceAgentLogsParams) ([]database.WorkspaceAgentLog, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentLogs(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentMetadata(ctx context.Context, arg database.InsertWorkspaceAgentMetadataParams) error { - start := time.Now() - err := m.s.InsertWorkspaceAgentMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) 
InsertWorkspaceAgentScriptTimings(ctx context.Context, arg database.InsertWorkspaceAgentScriptTimingsParams) (database.WorkspaceAgentScriptTiming, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentScriptTimings(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScriptTimings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentScripts(ctx context.Context, arg database.InsertWorkspaceAgentScriptsParams) ([]database.WorkspaceAgentScript, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentScripts(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScripts").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentStats(ctx context.Context, arg database.InsertWorkspaceAgentStatsParams) error { - start := time.Now() - r0 := m.s.InsertWorkspaceAgentStats(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertWorkspaceApp(ctx context.Context, arg database.InsertWorkspaceAppParams) (database.WorkspaceApp, error) { - start := time.Now() - app, err := m.s.InsertWorkspaceApp(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceApp").Observe(time.Since(start).Seconds()) - return app, err -} - -func (m metricsStore) InsertWorkspaceAppStats(ctx context.Context, arg database.InsertWorkspaceAppStatsParams) error { - start := time.Now() - r0 := m.s.InsertWorkspaceAppStats(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAppStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertWorkspaceBuild(ctx context.Context, arg database.InsertWorkspaceBuildParams) error { - start := time.Now() - err := m.s.InsertWorkspaceBuild(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceBuild").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) 
InsertWorkspaceBuildParameters(ctx context.Context, arg database.InsertWorkspaceBuildParametersParams) error { - start := time.Now() - err := m.s.InsertWorkspaceBuildParameters(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertWorkspaceProxy(ctx context.Context, arg database.InsertWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.InsertWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) InsertWorkspaceResource(ctx context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) { - start := time.Now() - resource, err := m.s.InsertWorkspaceResource(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceResource").Observe(time.Since(start).Seconds()) - return resource, err -} - -func (m metricsStore) InsertWorkspaceResourceMetadata(ctx context.Context, arg database.InsertWorkspaceResourceMetadataParams) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.InsertWorkspaceResourceMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceResourceMetadata").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.ListProvisionerKeysByOrganization(ctx, organizationID) - m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := 
m.s.ListProvisionerKeysByOrganizationExcludeReserved(ctx, organizationID) - m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganizationExcludeReserved").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.ListWorkspaceAgentPortShares(ctx, workspaceID) - m.queryLatencies.WithLabelValues("ListWorkspaceAgentPortShares").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) OrganizationMembers(ctx context.Context, arg database.OrganizationMembersParams) ([]database.OrganizationMembersRow, error) { - start := time.Now() - r0, r1 := m.s.OrganizationMembers(ctx, arg) - m.queryLatencies.WithLabelValues("OrganizationMembers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx context.Context, templateID uuid.UUID) error { - start := time.Now() - r0 := m.s.ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx, templateID) - m.queryLatencies.WithLabelValues("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) RegisterWorkspaceProxy(ctx context.Context, arg database.RegisterWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.RegisterWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("RegisterWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - r0 := m.s.RemoveUserFromAllGroups(ctx, userID) - m.queryLatencies.WithLabelValues("RemoveUserFromAllGroups").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) RemoveUserFromGroups(ctx context.Context, arg 
database.RemoveUserFromGroupsParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.RemoveUserFromGroups(ctx, arg) - m.queryLatencies.WithLabelValues("RemoveUserFromGroups").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error { - start := time.Now() - r0 := m.s.RevokeDBCryptKey(ctx, activeKeyDigest) - m.queryLatencies.WithLabelValues("RevokeDBCryptKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) { - start := time.Now() - ok, err := m.s.TryAcquireLock(ctx, pgTryAdvisoryXactLock) - m.queryLatencies.WithLabelValues("TryAcquireLock").Observe(time.Since(start).Seconds()) - return ok, err -} - -func (m metricsStore) UnarchiveTemplateVersion(ctx context.Context, arg database.UnarchiveTemplateVersionParams) error { - start := time.Now() - r0 := m.s.UnarchiveTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("UnarchiveTemplateVersion").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UnfavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { - start := time.Now() - r0 := m.s.UnfavoriteWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("UnfavoriteWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { - start := time.Now() - err := m.s.UpdateAPIKeyByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateAPIKeyByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) { - start := time.Now() - key, err := m.s.UpdateCryptoKeyDeletesAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateCryptoKeyDeletesAt").Observe(time.Since(start).Seconds()) - return 
key, err -} - -func (m metricsStore) UpdateCustomRole(ctx context.Context, arg database.UpdateCustomRoleParams) (database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.UpdateCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateCustomRole").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateExternalAuthLink(ctx context.Context, arg database.UpdateExternalAuthLinkParams) (database.ExternalAuthLink, error) { - start := time.Now() - link, err := m.s.UpdateExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateExternalAuthLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) { - start := time.Now() - key, err := m.s.UpdateGitSSHKey(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateGitSSHKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) UpdateGroupByID(ctx context.Context, arg database.UpdateGroupByIDParams) (database.Group, error) { - start := time.Now() - group, err := m.s.UpdateGroupByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateGroupByID").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfter database.UpdateInactiveUsersToDormantParams) ([]database.UpdateInactiveUsersToDormantRow, error) { - start := time.Now() - r0, r1 := m.s.UpdateInactiveUsersToDormant(ctx, lastSeenAfter) - m.queryLatencies.WithLabelValues("UpdateInactiveUsersToDormant").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) { - start := time.Now() - member, err := m.s.UpdateMemberRoles(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateMemberRoles").Observe(time.Since(start).Seconds()) - return member, err 
-} - -func (m metricsStore) UpdateNotificationTemplateMethodByID(ctx context.Context, arg database.UpdateNotificationTemplateMethodByIDParams) (database.NotificationTemplate, error) { - start := time.Now() - r0, r1 := m.s.UpdateNotificationTemplateMethodByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateNotificationTemplateMethodByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.UpdateOAuth2ProviderAppByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.UpdateOAuth2ProviderAppSecretByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { - start := time.Now() - r0, r1 := m.s.UpdateOrganization(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { - start := time.Now() - r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerDaemonLastSeenAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobByID(ctx, 
arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobWithCancelByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCancelByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobWithCompleteByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { - start := time.Now() - replica, err := m.s.UpdateReplica(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateReplica").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg database.UpdateTailnetPeerStatusByCoordinatorParams) error { - start := time.Now() - r0 := m.s.UpdateTailnetPeerStatusByCoordinator(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTailnetPeerStatusByCoordinator").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateACLByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateACLByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateAccessControlByID(ctx context.Context, arg database.UpdateTemplateAccessControlByIDParams) error { - start := time.Now() - r0 := m.s.UpdateTemplateAccessControlByID(ctx, 
arg) - m.queryLatencies.WithLabelValues("UpdateTemplateAccessControlByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateTemplateActiveVersionByID(ctx context.Context, arg database.UpdateTemplateActiveVersionByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateActiveVersionByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateActiveVersionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateDeletedByID(ctx context.Context, arg database.UpdateTemplateDeletedByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateDeletedByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateDeletedByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateMetaByID(ctx context.Context, arg database.UpdateTemplateMetaByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateMetaByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateMetaByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateScheduleByID(ctx context.Context, arg database.UpdateTemplateScheduleByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateScheduleByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateScheduleByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg database.UpdateTemplateVersionDescriptionByJobIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionDescriptionByJobID(ctx, arg) - 
m.queryLatencies.WithLabelValues("UpdateTemplateVersionDescriptionByJobID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionExternalAuthProvidersByJobID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { - start := time.Now() - r0 := m.s.UpdateTemplateWorkspacesLastUsedAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateWorkspacesLastUsedAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserAppearanceSettings(ctx context.Context, arg database.UpdateUserAppearanceSettingsParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserAppearanceSettings(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserAppearanceSettings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.UpdateUserDeletedByID(ctx, id) - m.queryLatencies.WithLabelValues("UpdateUserDeletedByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserGithubComUserID(ctx context.Context, arg database.UpdateUserGithubComUserIDParams) error { - start := time.Now() - r0 := m.s.UpdateUserGithubComUserID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserGithubComUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserHashedOneTimePasscode(ctx context.Context, arg database.UpdateUserHashedOneTimePasscodeParams) error { - start := time.Now() - r0 := 
m.s.UpdateUserHashedOneTimePasscode(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserHashedOneTimePasscode").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserHashedPassword(ctx context.Context, arg database.UpdateUserHashedPasswordParams) error { - start := time.Now() - err := m.s.UpdateUserHashedPassword(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserHashedPassword").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateUserLastSeenAt(ctx context.Context, arg database.UpdateUserLastSeenAtParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserLastSeenAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLastSeenAt").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserLink(ctx context.Context, arg database.UpdateUserLinkParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.UpdateUserLink(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.UpdateUserLinkedID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLinkedID").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateUserLoginType(ctx context.Context, arg database.UpdateUserLoginTypeParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserLoginType(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLoginType").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserNotificationPreferences(ctx context.Context, arg database.UpdateUserNotificationPreferencesParams) (int64, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserNotificationPreferences(ctx, arg) - 
m.queryLatencies.WithLabelValues("UpdateUserNotificationPreferences").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserProfile(ctx context.Context, arg database.UpdateUserProfileParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserProfile(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserProfile").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserQuietHoursSchedule(ctx context.Context, arg database.UpdateUserQuietHoursScheduleParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserQuietHoursSchedule(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserQuietHoursSchedule").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRolesParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserRoles(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserRoles").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserStatus(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserStatus").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateWorkspace(ctx context.Context, arg database.UpdateWorkspaceParams) (database.WorkspaceTable, error) { - start := time.Now() - workspace, err := m.s.UpdateWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspace").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAgentConnectionByID(ctx, arg) - 
m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentConnectionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg database.UpdateWorkspaceAgentLifecycleStateByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceAgentLifecycleStateByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg database.UpdateWorkspaceAgentLogOverflowByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceAgentLogOverflowByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLogOverflowByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAgentMetadata(ctx context.Context, arg database.UpdateWorkspaceAgentMetadataParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAgentMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg database.UpdateWorkspaceAgentStartupByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAgentStartupByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentStartupByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAppHealthByID(ctx context.Context, arg database.UpdateWorkspaceAppHealthByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAppHealthByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAppHealthByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg database.UpdateWorkspaceAutomaticUpdatesParams) error { - start := time.Now() - r0 := 
m.s.UpdateWorkspaceAutomaticUpdates(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAutomaticUpdates").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAutostart(ctx context.Context, arg database.UpdateWorkspaceAutostartParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAutostart(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAutostart").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceBuildCostByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildCostByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceBuildDeadlineByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildDeadlineByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildProvisionerStateByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceDeletedByID(ctx context.Context, arg database.UpdateWorkspaceDeletedByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceDeletedByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceDeletedByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg database.UpdateWorkspaceDormantDeletingAtParams) (database.WorkspaceTable, error) 
{ - start := time.Now() - ws, r0 := m.s.UpdateWorkspaceDormantDeletingAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceDormantDeletingAt").Observe(time.Since(start).Seconds()) - return ws, r0 -} - -func (m metricsStore) UpdateWorkspaceLastUsedAt(ctx context.Context, arg database.UpdateWorkspaceLastUsedAtParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceLastUsedAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceProxy(ctx context.Context, arg database.UpdateWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.UpdateWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) UpdateWorkspaceProxyDeleted(ctx context.Context, arg database.UpdateWorkspaceProxyDeletedParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceProxyDeleted(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceProxyDeleted").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceTTL(ctx context.Context, arg database.UpdateWorkspaceTTLParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceTTL(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceTTL").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams) ([]database.WorkspaceTable, error) { - start := time.Now() - r0, r1 := m.s.UpdateWorkspacesDormantDeletingAtByTemplateID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspacesDormantDeletingAtByTemplateID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertAnnouncementBanners(ctx context.Context, value string) error { - start := 
time.Now() - r0 := m.s.UpsertAnnouncementBanners(ctx, value) - m.queryLatencies.WithLabelValues("UpsertAnnouncementBanners").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertAppSecurityKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertAppSecurityKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertAppSecurityKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertApplicationName(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertApplicationName(ctx, value) - m.queryLatencies.WithLabelValues("UpsertApplicationName").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertCoordinatorResumeTokenSigningKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { - start := time.Now() - r0 := m.s.UpsertDefaultProxy(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertDefaultProxy").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertHealthSettings(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertHealthSettings(ctx, value) - m.queryLatencies.WithLabelValues("UpsertHealthSettings").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error { - start := time.Now() - r0 := m.s.UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) 
UpsertLastUpdateCheck(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertLastUpdateCheck(ctx, value) - m.queryLatencies.WithLabelValues("UpsertLastUpdateCheck").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertLogoURL(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertLogoURL(ctx, value) - m.queryLatencies.WithLabelValues("UpsertLogoURL").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error { - start := time.Now() - r0 := m.s.UpsertNotificationReportGeneratorLog(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertNotificationReportGeneratorLog").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertNotificationsSettings(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertNotificationsSettings(ctx, value) - m.queryLatencies.WithLabelValues("UpsertNotificationsSettings").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertOAuthSigningKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertOAuthSigningKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertProvisionerDaemon(ctx context.Context, arg database.UpsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) { - start := time.Now() - r0, r1 := m.s.UpsertProvisionerDaemon(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertProvisionerDaemon").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertRuntimeConfig(ctx context.Context, arg database.UpsertRuntimeConfigParams) error { - start := time.Now() - r0 := m.s.UpsertRuntimeConfig(ctx, arg) - 
m.queryLatencies.WithLabelValues("UpsertRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetAgent(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetClient(ctx context.Context, arg database.UpsertTailnetClientParams) (database.TailnetClient, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetClient(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetClient").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetClientSubscription(ctx context.Context, arg database.UpsertTailnetClientSubscriptionParams) error { - start := time.Now() - r0 := m.s.UpsertTailnetClientSubscription(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetClientSubscription").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (database.TailnetCoordinator, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetCoordinator(ctx, id) - m.queryLatencies.WithLabelValues("UpsertTailnetCoordinator").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetPeer(ctx context.Context, arg database.UpsertTailnetPeerParams) (database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetPeer(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetPeer").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTailnetTunnelParams) (database.TailnetTunnel, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetTunnel(ctx, arg) - 
m.queryLatencies.WithLabelValues("UpsertTailnetTunnel").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTemplateUsageStats(ctx context.Context) error { - start := time.Now() - r0 := m.s.UpsertTemplateUsageStats(ctx) - m.queryLatencies.WithLabelValues("UpsertTemplateUsageStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertWorkspaceAgentPortShare(ctx context.Context, arg database.UpsertWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.UpsertWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, prepared rbac.PreparedAuthorized) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetAuthorizedTemplates(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedTemplates").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetTemplateGroupRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateGroup, error) { - start := time.Now() - roles, err := m.s.GetTemplateGroupRoles(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateGroupRoles").Observe(time.Since(start).Seconds()) - return roles, err -} - -func (m metricsStore) GetTemplateUserRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateUser, error) { - start := time.Now() - roles, err := m.s.GetTemplateUserRoles(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateUserRoles").Observe(time.Since(start).Seconds()) - return roles, err -} - -func (m metricsStore) GetAuthorizedWorkspaces(ctx context.Context, arg database.GetWorkspacesParams, prepared rbac.PreparedAuthorized) ([]database.GetWorkspacesRow, error) { - start := time.Now() - workspaces, err := 
m.s.GetAuthorizedWorkspaces(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedWorkspaces").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) GetAuthorizedUsers(ctx context.Context, arg database.GetUsersParams, prepared rbac.PreparedAuthorized) ([]database.GetUsersRow, error) { - start := time.Now() - r0, r1 := m.s.GetAuthorizedUsers(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedUsers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuthorizedAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams, prepared rbac.PreparedAuthorized) ([]database.GetAuditLogsOffsetRow, error) { - start := time.Now() - r0, r1 := m.s.GetAuthorizedAuditLogsOffset(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedAuditLogsOffset").Observe(time.Since(start).Seconds()) - return r0, r1 -} diff --git a/coderd/database/dbmetrics/dbmetrics_test.go b/coderd/database/dbmetrics/dbmetrics_test.go new file mode 100644 index 0000000000000..bd6566d054aae --- /dev/null +++ b/coderd/database/dbmetrics/dbmetrics_test.go @@ -0,0 +1,109 @@ +package dbmetrics_test + +import ( + "bytes" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/coderdtest/promhelp" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbmem" + "github.com/coder/coder/v2/coderd/database/dbmetrics" +) + +func TestInTxMetrics(t *testing.T) { + t.Parallel() + + successLabels := prometheus.Labels{ + "success": "true", + "tx_id": "unlabeled", + } + const inTxHistMetricName = "coderd_db_tx_duration_seconds" + const inTxCountMetricName = "coderd_db_tx_executions_count" + t.Run("QueryMetrics", func(t *testing.T) { + t.Parallel() + + db := dbmem.New() 
+ reg := prometheus.NewRegistry() + db = dbmetrics.NewQueryMetrics(db, slogtest.Make(t, nil), reg) + + err := db.InTx(func(s database.Store) error { + return nil + }, nil) + require.NoError(t, err) + + // Check that the metrics are registered + inTxMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, successLabels) + require.NotNil(t, inTxMetric) + require.Equal(t, uint64(1), inTxMetric.GetSampleCount()) + }) + + t.Run("DBMetrics", func(t *testing.T) { + t.Parallel() + + db := dbmem.New() + reg := prometheus.NewRegistry() + db = dbmetrics.NewDBMetrics(db, slogtest.Make(t, nil), reg) + + err := db.InTx(func(s database.Store) error { + return nil + }, nil) + require.NoError(t, err) + + // Check that the metrics are registered + inTxMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, successLabels) + require.NotNil(t, inTxMetric) + require.Equal(t, uint64(1), inTxMetric.GetSampleCount()) + }) + + // Test log output and metrics on failures + // Log example: + // [erro] database transaction hit serialization error and had to retry success=false executions=2 id=foobar_factory + t.Run("SerializationError", func(t *testing.T) { + t.Parallel() + + var output bytes.Buffer + logger := slog.Make(sloghuman.Sink(&output)) + + reg := prometheus.NewRegistry() + db := dbmetrics.NewDBMetrics(dbmem.New(), logger, reg) + const id = "foobar_factory" + + txOpts := database.DefaultTXOptions().WithID(id) + database.IncrementExecutionCount(txOpts) // 2 executions + + err := db.InTx(func(s database.Store) error { + return xerrors.Errorf("some dumb error") + }, txOpts) + require.Error(t, err) + + // Check that the metrics are registered + inTxHistMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, prometheus.Labels{ + "success": "false", + "tx_id": id, + }) + require.NotNil(t, inTxHistMetric) + require.Equal(t, uint64(1), inTxHistMetric.GetSampleCount()) + + inTxCountMetric := promhelp.CounterValue(t, reg, inTxCountMetricName, prometheus.Labels{ + "success": 
"false", + "retries": "1", + "tx_id": id, + }) + require.NotNil(t, inTxCountMetric) + require.Equal(t, 1, inTxCountMetric) + + // Also check the logs + require.Contains(t, output.String(), "some dumb error") + require.Contains(t, output.String(), "database transaction hit serialization error and had to retry") + require.Contains(t, output.String(), "success=false") + require.Contains(t, output.String(), "executions=2") + require.Contains(t, output.String(), "id="+id) + }) +} diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go new file mode 100644 index 0000000000000..7e74aab3b9de0 --- /dev/null +++ b/coderd/database/dbmetrics/querymetrics.go @@ -0,0 +1,2710 @@ +// Code generated by coderd/database/gen/metrics. +// Any function can be edited and will not be overwritten. +// New database functions are automatically generated! +package dbmetrics + +import ( + "context" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "golang.org/x/exp/slices" + + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" +) + +var ( + // Force these imports, for some reason the autogen does not include them. + _ uuid.UUID + _ policy.Action + _ rbac.Objecter +) + +const wrapname = "dbmetrics.metricsStore" + +// NewQueryMetrics returns a database.Store that registers metrics for all queries to reg. +func NewQueryMetrics(s database.Store, logger slog.Logger, reg prometheus.Registerer) database.Store { + // Don't double-wrap. 
+ if slices.Contains(s.Wrappers(), wrapname) { + return s + } + queryLatencies := prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "db", + Name: "query_latencies_seconds", + Help: "Latency distribution of queries in seconds.", + Buckets: prometheus.DefBuckets, + }, []string{"query"}) + reg.MustRegister(queryLatencies) + return &queryMetricsStore{ + s: s, + queryLatencies: queryLatencies, + dbMetrics: NewDBMetrics(s, logger, reg).(*metricsStore), + } +} + +var _ database.Store = (*queryMetricsStore)(nil) + +type queryMetricsStore struct { + s database.Store + queryLatencies *prometheus.HistogramVec + dbMetrics *metricsStore +} + +func (m queryMetricsStore) Wrappers() []string { + return append(m.s.Wrappers(), wrapname) +} + +func (m queryMetricsStore) Ping(ctx context.Context) (time.Duration, error) { + start := time.Now() + duration, err := m.s.Ping(ctx) + m.queryLatencies.WithLabelValues("Ping").Observe(time.Since(start).Seconds()) + return duration, err +} + +func (m queryMetricsStore) InTx(f func(database.Store) error, options *database.TxOptions) error { + return m.dbMetrics.InTx(f, options) +} + +func (m queryMetricsStore) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error { + start := time.Now() + err := m.s.AcquireLock(ctx, pgAdvisoryXactLock) + m.queryLatencies.WithLabelValues("AcquireLock").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) AcquireNotificationMessages(ctx context.Context, arg database.AcquireNotificationMessagesParams) ([]database.AcquireNotificationMessagesRow, error) { + start := time.Now() + r0, r1 := m.s.AcquireNotificationMessages(ctx, arg) + m.queryLatencies.WithLabelValues("AcquireNotificationMessages").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { + start := time.Now() + provisionerJob, err 
:= m.s.AcquireProvisionerJob(ctx, arg) + m.queryLatencies.WithLabelValues("AcquireProvisionerJob").Observe(time.Since(start).Seconds()) + return provisionerJob, err +} + +func (m queryMetricsStore) ActivityBumpWorkspace(ctx context.Context, arg database.ActivityBumpWorkspaceParams) error { + start := time.Now() + r0 := m.s.ActivityBumpWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("ActivityBumpWorkspace").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) AllUserIDs(ctx context.Context) ([]uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.AllUserIDs(ctx) + m.queryLatencies.WithLabelValues("AllUserIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) ArchiveUnusedTemplateVersions(ctx context.Context, arg database.ArchiveUnusedTemplateVersionsParams) ([]uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.ArchiveUnusedTemplateVersions(ctx, arg) + m.queryLatencies.WithLabelValues("ArchiveUnusedTemplateVersions").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { + start := time.Now() + r0 := m.s.BatchUpdateWorkspaceLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("BatchUpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) BulkMarkNotificationMessagesFailed(ctx context.Context, arg database.BulkMarkNotificationMessagesFailedParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.BulkMarkNotificationMessagesFailed(ctx, arg) + m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesFailed").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) BulkMarkNotificationMessagesSent(ctx context.Context, arg database.BulkMarkNotificationMessagesSentParams) (int64, error) { + start := time.Now() + r0, r1 := 
m.s.BulkMarkNotificationMessagesSent(ctx, arg) + m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesSent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) CleanTailnetCoordinators(ctx context.Context) error { + start := time.Now() + err := m.s.CleanTailnetCoordinators(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetCoordinators").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) CleanTailnetLostPeers(ctx context.Context) error { + start := time.Now() + r0 := m.s.CleanTailnetLostPeers(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetLostPeers").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) CleanTailnetTunnels(ctx context.Context) error { + start := time.Now() + r0 := m.s.CleanTailnetTunnels(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.CustomRoles(ctx, arg) + m.queryLatencies.WithLabelValues("CustomRoles").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteAPIKeyByID(ctx context.Context, id string) error { + start := time.Now() + err := m.s.DeleteAPIKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteAPIKeyByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteAPIKeysByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteAllTailnetClientSubscriptions(ctx context.Context, arg database.DeleteAllTailnetClientSubscriptionsParams) error { + start := time.Now() + r0 := m.s.DeleteAllTailnetClientSubscriptions(ctx, 
arg) + m.queryLatencies.WithLabelValues("DeleteAllTailnetClientSubscriptions").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteAllTailnetTunnels(ctx context.Context, arg database.DeleteAllTailnetTunnelsParams) error { + start := time.Now() + r0 := m.s.DeleteAllTailnetTunnels(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteAllTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteApplicationConnectAPIKeysByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteApplicationConnectAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteCoordinator(ctx, id) + m.queryLatencies.WithLabelValues("DeleteCoordinator").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteCryptoKey(ctx context.Context, arg database.DeleteCryptoKeyParams) (database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.DeleteCryptoKey(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteCryptoKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteCustomRole(ctx context.Context, arg database.DeleteCustomRoleParams) error { + start := time.Now() + r0 := m.s.DeleteCustomRole(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteCustomRole").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { + start := time.Now() + r0 := m.s.DeleteExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteExternalAuthLink").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteGitSSHKey(ctx context.Context, 
userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteGitSSHKey(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteGitSSHKey").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteGroupByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + err := m.s.DeleteGroupByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteGroupByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteGroupMemberFromGroup(ctx context.Context, arg database.DeleteGroupMemberFromGroupParams) error { + start := time.Now() + err := m.s.DeleteGroupMemberFromGroup(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteGroupMemberFromGroup").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteLicense(ctx context.Context, id int32) (int32, error) { + start := time.Now() + licenseID, err := m.s.DeleteLicense(ctx, id) + m.queryLatencies.WithLabelValues("DeleteLicense").Observe(time.Since(start).Seconds()) + return licenseID, err +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppCodeByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx, arg) + 
m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodesByAppAndUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppSecretByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppTokensByAppAndUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldNotificationMessages(ctx context.Context) error { + start := time.Now() + r0 := m.s.DeleteOldNotificationMessages(ctx) + m.queryLatencies.WithLabelValues("DeleteOldNotificationMessages").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldProvisionerDaemons(ctx context.Context) error { + start := time.Now() + r0 := m.s.DeleteOldProvisionerDaemons(ctx) + m.queryLatencies.WithLabelValues("DeleteOldProvisionerDaemons").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, arg time.Time) error { + start := time.Now() + r0 := m.s.DeleteOldWorkspaceAgentLogs(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) error { + start := time.Now() + err := m.s.DeleteOldWorkspaceAgentStats(ctx) + m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteOrganization(ctx 
context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOrganization(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error { + start := time.Now() + r0 := m.s.DeleteOrganizationMember(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOrganizationMember").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteProvisionerKey(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteProvisionerKey(ctx, id) + m.queryLatencies.WithLabelValues("DeleteProvisionerKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { + start := time.Now() + err := m.s.DeleteReplicasUpdatedBefore(ctx, updatedAt) + m.queryLatencies.WithLabelValues("DeleteReplicasUpdatedBefore").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteRuntimeConfig(ctx context.Context, key string) error { + start := time.Now() + r0 := m.s.DeleteRuntimeConfig(ctx, key) + m.queryLatencies.WithLabelValues("DeleteRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteTailnetAgent(ctx context.Context, arg database.DeleteTailnetAgentParams) (database.DeleteTailnetAgentRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetAgent(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetClient(ctx context.Context, arg database.DeleteTailnetClientParams) (database.DeleteTailnetClientRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetClient(ctx, arg) + 
m.queryLatencies.WithLabelValues("DeleteTailnetClient").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetClientSubscription(ctx context.Context, arg database.DeleteTailnetClientSubscriptionParams) error { + start := time.Now() + r0 := m.s.DeleteTailnetClientSubscription(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetClientSubscription").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteTailnetPeer(ctx context.Context, arg database.DeleteTailnetPeerParams) (database.DeleteTailnetPeerRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetPeer(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetPeer").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTailnetTunnelParams) (database.DeleteTailnetTunnelRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetTunnel(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetTunnel").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteWorkspaceAgentPortShare(ctx context.Context, arg database.DeleteWorkspaceAgentPortShareParams) error { + start := time.Now() + r0 := m.s.DeleteWorkspaceAgentPortShare(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, templateID uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteWorkspaceAgentPortSharesByTemplate(ctx, templateID) + m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortSharesByTemplate").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error { + start := time.Now() + r0 := m.s.EnqueueNotificationMessage(ctx, arg) + 
m.queryLatencies.WithLabelValues("EnqueueNotificationMessage").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) FavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { + start := time.Now() + r0 := m.s.FavoriteWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("FavoriteWorkspace").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) { + start := time.Now() + r0, r1 := m.s.FetchNewMessageMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("FetchNewMessageMetadata").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { + start := time.Now() + apiKey, err := m.s.GetAPIKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("GetAPIKeyByID").Observe(time.Since(start).Seconds()) + return apiKey, err +} + +func (m queryMetricsStore) GetAPIKeyByName(ctx context.Context, arg database.GetAPIKeyByNameParams) (database.APIKey, error) { + start := time.Now() + apiKey, err := m.s.GetAPIKeyByName(ctx, arg) + m.queryLatencies.WithLabelValues("GetAPIKeyByName").Observe(time.Since(start).Seconds()) + return apiKey, err +} + +func (m queryMetricsStore) GetAPIKeysByLoginType(ctx context.Context, loginType database.LoginType) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysByLoginType(ctx, loginType) + m.queryLatencies.WithLabelValues("GetAPIKeysByLoginType").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + +func (m queryMetricsStore) GetAPIKeysByUserID(ctx context.Context, arg database.GetAPIKeysByUserIDParams) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysByUserID(ctx, arg) + m.queryLatencies.WithLabelValues("GetAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + 
+func (m queryMetricsStore) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysLastUsedAfter(ctx, lastUsed) + m.queryLatencies.WithLabelValues("GetAPIKeysLastUsedAfter").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + +func (m queryMetricsStore) GetActiveUserCount(ctx context.Context) (int64, error) { + start := time.Now() + count, err := m.s.GetActiveUserCount(ctx) + m.queryLatencies.WithLabelValues("GetActiveUserCount").Observe(time.Since(start).Seconds()) + return count, err +} + +func (m queryMetricsStore) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, templateID uuid.UUID) ([]database.WorkspaceBuild, error) { + start := time.Now() + r0, r1 := m.s.GetActiveWorkspaceBuildsByTemplateID(ctx, templateID) + m.queryLatencies.WithLabelValues("GetActiveWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetAgents(ctx context.Context) ([]database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetAgents(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetAgents").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetCoordinators(ctx context.Context) ([]database.TailnetCoordinator, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetCoordinators(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetCoordinators").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetPeers(ctx context.Context) ([]database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetPeers(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetPeers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetTunnel, error) { + start := time.Now() + r0, r1 := 
m.s.GetAllTailnetTunnels(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAnnouncementBanners(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetAnnouncementBanners(ctx) + m.queryLatencies.WithLabelValues("GetAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAppSecurityKey(ctx context.Context) (string, error) { + start := time.Now() + key, err := m.s.GetAppSecurityKey(ctx) + m.queryLatencies.WithLabelValues("GetAppSecurityKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) GetApplicationName(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetApplicationName(ctx) + m.queryLatencies.WithLabelValues("GetApplicationName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams) ([]database.GetAuditLogsOffsetRow, error) { + start := time.Now() + rows, err := m.s.GetAuditLogsOffset(ctx, arg) + m.queryLatencies.WithLabelValues("GetAuditLogsOffset").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m queryMetricsStore) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (database.GetAuthorizationUserRolesRow, error) { + start := time.Now() + row, err := m.s.GetAuthorizationUserRoles(ctx, userID) + m.queryLatencies.WithLabelValues("GetAuthorizationUserRoles").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetCoordinatorResumeTokenSigningKey(ctx) + m.queryLatencies.WithLabelValues("GetCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) 
GetCryptoKeyByFeatureAndSequence(ctx context.Context, arg database.GetCryptoKeyByFeatureAndSequenceParams) (database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeyByFeatureAndSequence(ctx, arg) + m.queryLatencies.WithLabelValues("GetCryptoKeyByFeatureAndSequence").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetCryptoKeys(ctx context.Context) ([]database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeys(ctx) + m.queryLatencies.WithLabelValues("GetCryptoKeys").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetCryptoKeysByFeature(ctx context.Context, feature database.CryptoKeyFeature) ([]database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeysByFeature(ctx, feature) + m.queryLatencies.WithLabelValues("GetCryptoKeysByFeature").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDBCryptKeys(ctx context.Context) ([]database.DBCryptKey, error) { + start := time.Now() + r0, r1 := m.s.GetDBCryptKeys(ctx) + m.queryLatencies.WithLabelValues("GetDBCryptKeys").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDERPMeshKey(ctx context.Context) (string, error) { + start := time.Now() + key, err := m.s.GetDERPMeshKey(ctx) + m.queryLatencies.WithLabelValues("GetDERPMeshKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) GetDefaultOrganization(ctx context.Context) (database.Organization, error) { + start := time.Now() + r0, r1 := m.s.GetDefaultOrganization(ctx) + m.queryLatencies.WithLabelValues("GetDefaultOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDefaultProxyConfig(ctx context.Context) (database.GetDefaultProxyConfigRow, error) { + start := time.Now() + resp, err := m.s.GetDefaultProxyConfig(ctx) + 
m.queryLatencies.WithLabelValues("GetDefaultProxyConfig").Observe(time.Since(start).Seconds()) + return resp, err +} + +func (m queryMetricsStore) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]database.GetDeploymentDAUsRow, error) { + start := time.Now() + rows, err := m.s.GetDeploymentDAUs(ctx, tzOffset) + m.queryLatencies.WithLabelValues("GetDeploymentDAUs").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m queryMetricsStore) GetDeploymentID(ctx context.Context) (string, error) { + start := time.Now() + id, err := m.s.GetDeploymentID(ctx) + m.queryLatencies.WithLabelValues("GetDeploymentID").Observe(time.Since(start).Seconds()) + return id, err +} + +func (m queryMetricsStore) GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentStatsRow, error) { + start := time.Now() + row, err := m.s.GetDeploymentWorkspaceAgentStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetDeploymentWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentUsageStatsRow, error) { + start := time.Now() + r0, r1 := m.s.GetDeploymentWorkspaceAgentUsageStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDeploymentWorkspaceStats(ctx context.Context) (database.GetDeploymentWorkspaceStatsRow, error) { + start := time.Now() + row, err := m.s.GetDeploymentWorkspaceStats(ctx) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceStats").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) { + start := time.Now() + link, err := 
m.s.GetExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("GetExternalAuthLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) { + start := time.Now() + r0, r1 := m.s.GetExternalAuthLinksByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetExternalAuthLinksByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg database.GetFailedWorkspaceBuildsByTemplateIDParams) ([]database.GetFailedWorkspaceBuildsByTemplateIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetFailedWorkspaceBuildsByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("GetFailedWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetFileByHashAndCreator(ctx context.Context, arg database.GetFileByHashAndCreatorParams) (database.File, error) { + start := time.Now() + file, err := m.s.GetFileByHashAndCreator(ctx, arg) + m.queryLatencies.WithLabelValues("GetFileByHashAndCreator").Observe(time.Since(start).Seconds()) + return file, err +} + +func (m queryMetricsStore) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, error) { + start := time.Now() + file, err := m.s.GetFileByID(ctx, id) + m.queryLatencies.WithLabelValues("GetFileByID").Observe(time.Since(start).Seconds()) + return file, err +} + +func (m queryMetricsStore) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]database.GetFileTemplatesRow, error) { + start := time.Now() + rows, err := m.s.GetFileTemplates(ctx, fileID) + m.queryLatencies.WithLabelValues("GetFileTemplates").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m queryMetricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) { + start := time.Now() + key, err := 
m.s.GetGitSSHKey(ctx, userID)
	m.queryLatencies.WithLabelValues("GetGitSSHKey").Observe(time.Since(start).Seconds())
	return key, err
}

// Each wrapper below times the underlying store call and records the
// elapsed seconds in the query-latency histogram under the query's name.

func (m queryMetricsStore) GetGroupByID(ctx context.Context, id uuid.UUID) (database.Group, error) {
	begin := time.Now()
	res, err := m.s.GetGroupByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetGroupByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetGroupByOrgAndName(ctx context.Context, arg database.GetGroupByOrgAndNameParams) (database.Group, error) {
	begin := time.Now()
	res, err := m.s.GetGroupByOrgAndName(ctx, arg)
	m.queryLatencies.WithLabelValues("GetGroupByOrgAndName").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetGroupMembers(ctx context.Context) ([]database.GroupMember, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembers(ctx)
	m.queryLatencies.WithLabelValues("GetGroupMembers").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetGroupMembersByGroupID(ctx context.Context, groupID uuid.UUID) ([]database.GroupMember, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembersByGroupID(ctx, groupID)
	m.queryLatencies.WithLabelValues("GetGroupMembersByGroupID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetGroupMembersCountByGroupID(ctx context.Context, groupID uuid.UUID) (int64, error) {
	begin := time.Now()
	res, err := m.s.GetGroupMembersCountByGroupID(ctx, groupID)
	m.queryLatencies.WithLabelValues("GetGroupMembersCountByGroupID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetGroups(ctx context.Context, arg database.GetGroupsParams) ([]database.GetGroupsRow, error) {
	begin := time.Now()
	res, err := m.s.GetGroups(ctx, arg)
	m.queryLatencies.WithLabelValues("GetGroups").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetHealthSettings(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetHealthSettings(ctx)
	m.queryLatencies.WithLabelValues("GetHealthSettings").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) {
	begin := time.Now()
	res, err := m.s.GetHungProvisionerJobs(ctx, hungSince)
	m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) {
	begin := time.Now()
	res, err := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg)
	m.queryLatencies.WithLabelValues("GetJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLastUpdateCheck(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetLastUpdateCheck(ctx)
	m.queryLatencies.WithLabelValues("GetLastUpdateCheck").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLatestCryptoKeyByFeature(ctx context.Context, feature database.CryptoKeyFeature) (database.CryptoKey, error) {
	begin := time.Now()
	res, err := m.s.GetLatestCryptoKeyByFeature(ctx, feature)
	m.queryLatencies.WithLabelValues("GetLatestCryptoKeyByFeature").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildByWorkspaceID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuilds(ctx)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuilds").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) {
	begin := time.Now()
	res, err := m.s.GetLatestWorkspaceBuildsByWorkspaceIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildsByWorkspaceIDs").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLicenseByID(ctx context.Context, id int32) (database.License, error) {
	begin := time.Now()
	res, err := m.s.GetLicenseByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetLicenseByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLicenses(ctx context.Context) ([]database.License, error) {
	begin := time.Now()
	res, err := m.s.GetLicenses(ctx)
	m.queryLatencies.WithLabelValues("GetLicenses").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetLogoURL(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetLogoURL(ctx)
	m.queryLatencies.WithLabelValues("GetLogoURL").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationMessagesByStatus(ctx, arg)
	m.queryLatencies.WithLabelValues("GetNotificationMessagesByStatus").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetNotificationReportGeneratorLogByTemplate(ctx context.Context, arg uuid.UUID) (database.NotificationReportGeneratorLog, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationReportGeneratorLogByTemplate(ctx, arg)
	m.queryLatencies.WithLabelValues("GetNotificationReportGeneratorLogByTemplate").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetNotificationTemplateByID(ctx context.Context, id uuid.UUID) (database.NotificationTemplate, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationTemplateByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetNotificationTemplateByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetNotificationTemplatesByKind(ctx context.Context, kind database.NotificationTemplateKind) ([]database.NotificationTemplate, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationTemplatesByKind(ctx, kind)
	m.queryLatencies.WithLabelValues("GetNotificationTemplatesByKind").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetNotificationsSettings(ctx context.Context) (string, error) {
	begin := time.Now()
	res, err := m.s.GetNotificationsSettings(ctx)
	m.queryLatencies.WithLabelValues("GetNotificationsSettings").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppCode, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppCodeByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppCode, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppCodeByPrefix(ctx, secretPrefix)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByPrefix").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretByPrefix(ctx, secretPrefix)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByPrefix").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppSecretsByAppID(ctx, appID)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretsByAppID").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPrefix []byte) (database.OAuth2ProviderAppToken, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderAppTokenByPrefix(ctx, hashPrefix)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppTokenByPrefix").Observe(time.Since(begin).Seconds())
	return res, err
}

func (m queryMetricsStore) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) {
	begin := time.Now()
	res, err := m.s.GetOAuth2ProviderApps(ctx)
	m.queryLatencies.WithLabelValues("GetOAuth2ProviderApps").Observe(time.Since(begin).Seconds())
	return res, err
}
+func (m queryMetricsStore) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppsByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppsByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuthSigningKey(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetOAuthSigningKey(ctx) + m.queryLatencies.WithLabelValues("GetOAuthSigningKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOrganizationByID(ctx context.Context, id uuid.UUID) (database.Organization, error) { + start := time.Now() + organization, err := m.s.GetOrganizationByID(ctx, id) + m.queryLatencies.WithLabelValues("GetOrganizationByID").Observe(time.Since(start).Seconds()) + return organization, err +} + +func (m queryMetricsStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) { + start := time.Now() + organization, err := m.s.GetOrganizationByName(ctx, name) + m.queryLatencies.WithLabelValues("GetOrganizationByName").Observe(time.Since(start).Seconds()) + return organization, err +} + +func (m queryMetricsStore) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]database.GetOrganizationIDsByMemberIDsRow, error) { + start := time.Now() + organizations, err := m.s.GetOrganizationIDsByMemberIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetOrganizationIDsByMemberIDs").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func (m queryMetricsStore) GetOrganizations(ctx context.Context, args database.GetOrganizationsParams) ([]database.Organization, error) { + start := time.Now() + organizations, err := m.s.GetOrganizations(ctx, args) + m.queryLatencies.WithLabelValues("GetOrganizations").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func 
(m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) { + start := time.Now() + organizations, err := m.s.GetOrganizationsByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetOrganizationsByUserID").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func (m queryMetricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { + start := time.Now() + schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetParameterSchemasByJobID").Observe(time.Since(start).Seconds()) + return schemas, err +} + +func (m queryMetricsStore) GetPreviousTemplateVersion(ctx context.Context, arg database.GetPreviousTemplateVersionParams) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetPreviousTemplateVersion(ctx, arg) + m.queryLatencies.WithLabelValues("GetPreviousTemplateVersion").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetProvisionerDaemons(ctx context.Context) ([]database.ProvisionerDaemon, error) { + start := time.Now() + daemons, err := m.s.GetProvisionerDaemons(ctx) + m.queryLatencies.WithLabelValues("GetProvisionerDaemons").Observe(time.Since(start).Seconds()) + return daemons, err +} + +func (m queryMetricsStore) GetProvisionerDaemonsByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerDaemon, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerDaemonsByOrganization(ctx, organizationID) + m.queryLatencies.WithLabelValues("GetProvisionerDaemonsByOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + start := time.Now() + job, err := m.s.GetProvisionerJobByID(ctx, id) + 
m.queryLatencies.WithLabelValues("GetProvisionerJobByID").Observe(time.Since(start).Seconds()) + return job, err +} + +func (m queryMetricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetProvisionerJobTimingsByJobID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { + start := time.Now() + jobs, err := m.s.GetProvisionerJobsByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDs").Observe(time.Since(start).Seconds()) + return jobs, err +} + +func (m queryMetricsStore) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids) + m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDsWithQueuePosition").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { + start := time.Now() + jobs, err := m.s.GetProvisionerJobsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetProvisionerJobsCreatedAfter").Observe(time.Since(start).Seconds()) + return jobs, err +} + +func (m queryMetricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByHashedSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) 
(database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerKeyByName(ctx context.Context, name database.GetProvisionerKeyByNameParams) (database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByName(ctx, name) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerLogsAfterID(ctx context.Context, arg database.GetProvisionerLogsAfterIDParams) ([]database.ProvisionerJobLog, error) { + start := time.Now() + logs, err := m.s.GetProvisionerLogsAfterID(ctx, arg) + m.queryLatencies.WithLabelValues("GetProvisionerLogsAfterID").Observe(time.Since(start).Seconds()) + return logs, err +} + +func (m queryMetricsStore) GetQuotaAllowanceForUser(ctx context.Context, userID database.GetQuotaAllowanceForUserParams) (int64, error) { + start := time.Now() + allowance, err := m.s.GetQuotaAllowanceForUser(ctx, userID) + m.queryLatencies.WithLabelValues("GetQuotaAllowanceForUser").Observe(time.Since(start).Seconds()) + return allowance, err +} + +func (m queryMetricsStore) GetQuotaConsumedForUser(ctx context.Context, ownerID database.GetQuotaConsumedForUserParams) (int64, error) { + start := time.Now() + consumed, err := m.s.GetQuotaConsumedForUser(ctx, ownerID) + m.queryLatencies.WithLabelValues("GetQuotaConsumedForUser").Observe(time.Since(start).Seconds()) + return consumed, err +} + +func (m queryMetricsStore) GetReplicaByID(ctx context.Context, id uuid.UUID) (database.Replica, error) { + start := time.Now() + replica, err := m.s.GetReplicaByID(ctx, id) + m.queryLatencies.WithLabelValues("GetReplicaByID").Observe(time.Since(start).Seconds()) + return replica, err +} + +func (m queryMetricsStore) GetReplicasUpdatedAfter(ctx context.Context, 
updatedAt time.Time) ([]database.Replica, error) { + start := time.Now() + replicas, err := m.s.GetReplicasUpdatedAfter(ctx, updatedAt) + m.queryLatencies.WithLabelValues("GetReplicasUpdatedAfter").Observe(time.Since(start).Seconds()) + return replicas, err +} + +func (m queryMetricsStore) GetRuntimeConfig(ctx context.Context, key string) (string, error) { + start := time.Now() + r0, r1 := m.s.GetRuntimeConfig(ctx, key) + m.queryLatencies.WithLabelValues("GetRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetAgents(ctx, id) + m.queryLatencies.WithLabelValues("GetTailnetAgents").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]database.TailnetClient, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetClientsForAgent(ctx, agentID) + m.queryLatencies.WithLabelValues("GetTailnetClientsForAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetPeers(ctx, id) + m.queryLatencies.WithLabelValues("GetTailnetPeers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerBindingsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetTunnelPeerBindings(ctx, srcID) + m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerBindings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerIDsRow, error) { + start := time.Now() + r0, r1 := 
m.s.GetTailnetTunnelPeerIDs(ctx, srcID) + m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateAppInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateAppInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) ([]database.GetTemplateAppInsightsByTemplateRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateAppInsightsByTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateAppInsightsByTemplate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAverageBuildTime(ctx context.Context, arg database.GetTemplateAverageBuildTimeParams) (database.GetTemplateAverageBuildTimeRow, error) { + start := time.Now() + buildTime, err := m.s.GetTemplateAverageBuildTime(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateAverageBuildTime").Observe(time.Since(start).Seconds()) + return buildTime, err +} + +func (m queryMetricsStore) GetTemplateByID(ctx context.Context, id uuid.UUID) (database.Template, error) { + start := time.Now() + template, err := m.s.GetTemplateByID(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateByID").Observe(time.Since(start).Seconds()) + return template, err +} + +func (m queryMetricsStore) GetTemplateByOrganizationAndName(ctx context.Context, arg database.GetTemplateByOrganizationAndNameParams) (database.Template, error) { + start := time.Now() + template, err := m.s.GetTemplateByOrganizationAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateByOrganizationAndName").Observe(time.Since(start).Seconds()) + return template, err +} + +func 
(m queryMetricsStore) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateDAUsParams) ([]database.GetTemplateDAUsRow, error) { + start := time.Now() + daus, err := m.s.GetTemplateDAUs(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateDAUs").Observe(time.Since(start).Seconds()) + return daus, err +} + +func (m queryMetricsStore) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsightsByInterval(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsightsByInterval").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsightsByTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsightsByTemplate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateParameterInsights(ctx context.Context, arg database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateParameterInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateParameterInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { + start := time.Now() + r0, r1 := 
m.s.GetTemplateUsageStats(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateUsageStats").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByID(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateVersionByID").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetTemplateVersionByJobID").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg database.GetTemplateVersionByTemplateIDAndNameParams) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByTemplateIDAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateVersionByTemplateIDAndName").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { + start := time.Now() + parameters, err := m.s.GetTemplateVersionParameters(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionParameters").Observe(time.Since(start).Seconds()) + return parameters, err +} + +func (m queryMetricsStore) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionVariable, error) { + start := time.Now() + variables, err := m.s.GetTemplateVersionVariables(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionVariables").Observe(time.Since(start).Seconds()) + return variables, err +} + 
+func (m queryMetricsStore) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionWorkspaceTags").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetTemplateVersionsByIDs").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplateVersionsByTemplateID(ctx context.Context, arg database.GetTemplateVersionsByTemplateIDParams) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateVersionsByTemplateID").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetTemplateVersionsCreatedAfter").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplates(ctx context.Context) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetTemplates(ctx) + m.queryLatencies.WithLabelValues("GetTemplates").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetTemplatesWithFilter(ctx context.Context, arg database.GetTemplatesWithFilterParams) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetTemplatesWithFilter(ctx, arg) + 
m.queryLatencies.WithLabelValues("GetTemplatesWithFilter").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { + start := time.Now() + licenses, err := m.s.GetUnexpiredLicenses(ctx) + m.queryLatencies.WithLabelValues("GetUnexpiredLicenses").Observe(time.Since(start).Seconds()) + return licenses, err +} + +func (m queryMetricsStore) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserActivityInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserActivityInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserByEmailOrUsername(ctx context.Context, arg database.GetUserByEmailOrUsernameParams) (database.User, error) { + start := time.Now() + user, err := m.s.GetUserByEmailOrUsername(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserByEmailOrUsername").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) GetUserByID(ctx context.Context, id uuid.UUID) (database.User, error) { + start := time.Now() + user, err := m.s.GetUserByID(ctx, id) + m.queryLatencies.WithLabelValues("GetUserByID").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) GetUserCount(ctx context.Context) (int64, error) { + start := time.Now() + count, err := m.s.GetUserCount(ctx) + m.queryLatencies.WithLabelValues("GetUserCount").Observe(time.Since(start).Seconds()) + return count, err +} + +func (m queryMetricsStore) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserLatencyInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserLatencyInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + 
+func (m queryMetricsStore) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (database.UserLink, error) { + start := time.Now() + link, err := m.s.GetUserLinkByLinkedID(ctx, linkedID) + m.queryLatencies.WithLabelValues("GetUserLinkByLinkedID").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetUserLinkByUserIDLoginType(ctx context.Context, arg database.GetUserLinkByUserIDLoginTypeParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.GetUserLinkByUserIDLoginType(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserLinkByUserIDLoginType").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.UserLink, error) { + start := time.Now() + r0, r1 := m.s.GetUserLinksByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetUserLinksByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]database.NotificationPreference, error) { + start := time.Now() + r0, r1 := m.s.GetUserNotificationPreferences(ctx, userID) + m.queryLatencies.WithLabelValues("GetUserNotificationPreferences").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserWorkspaceBuildParameters(ctx context.Context, ownerID database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserWorkspaceBuildParameters(ctx, ownerID) + m.queryLatencies.WithLabelValues("GetUserWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUsers(ctx context.Context, arg database.GetUsersParams) ([]database.GetUsersRow, error) { + start := time.Now() + users, err := m.s.GetUsers(ctx, arg) + 
m.queryLatencies.WithLabelValues("GetUsers").Observe(time.Since(start).Seconds()) + return users, err +} + +func (m queryMetricsStore) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]database.User, error) { + start := time.Now() + users, err := m.s.GetUsersByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetUsersByIDs").Observe(time.Since(start).Seconds()) + return users, err +} + +func (m queryMetricsStore) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, authToken) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentAndLatestBuildByAuthToken").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (database.WorkspaceAgent, error) { + start := time.Now() + agent, err := m.s.GetWorkspaceAgentByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentByID").Observe(time.Since(start).Seconds()) + return agent, err +} + +func (m queryMetricsStore) GetWorkspaceAgentByInstanceID(ctx context.Context, authInstanceID string) (database.WorkspaceAgent, error) { + start := time.Now() + agent, err := m.s.GetWorkspaceAgentByInstanceID(ctx, authInstanceID) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentByInstanceID").Observe(time.Since(start).Seconds()) + return agent, err +} + +func (m queryMetricsStore) GetWorkspaceAgentLifecycleStateByID(ctx context.Context, id uuid.UUID) (database.GetWorkspaceAgentLifecycleStateByIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentLifecycleStateByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentLogSourcesByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentLogSource, 
error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentLogSourcesByAgentIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogSourcesByAgentIDs").Observe(time.Since(start).Seconds())
	return r0, r1
}

// The wrapper methods below all follow the same instrumentation pattern:
// record a start time, delegate to the wrapped store (m.s), observe the
// elapsed seconds on the query-latency histogram labeled with the query
// name, and return the underlying results unchanged. The histogram label
// matches the wrapped method's name in every case.
// NOTE(review): m.s and m.queryLatencies are declared elsewhere in this
// file; this chunk shows only the generated-style delegation methods.

func (m queryMetricsStore) GetWorkspaceAgentLogsAfter(ctx context.Context, arg database.GetWorkspaceAgentLogsAfterParams) ([]database.WorkspaceAgentLog, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentLogsAfter(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogsAfter").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentMetadata(ctx context.Context, workspaceAgentID database.GetWorkspaceAgentMetadataParams) ([]database.WorkspaceAgentMetadatum, error) {
	start := time.Now()
	metadata, err := m.s.GetWorkspaceAgentMetadata(ctx, workspaceAgentID)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentMetadata").Observe(time.Since(start).Seconds())
	return metadata, err
}

func (m queryMetricsStore) GetWorkspaceAgentPortShare(ctx context.Context, arg database.GetWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentPortShare(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentPortShare").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentScriptTimingsByBuildID(ctx context.Context, id uuid.UUID) ([]database.GetWorkspaceAgentScriptTimingsByBuildIDRow, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentScriptTimingsByBuildID(ctx, id)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptTimingsByBuildID").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentScriptsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentScript, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentScriptsByAgentIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptsByAgentIDs").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsRow, error) {
	start := time.Now()
	stats, err := m.s.GetWorkspaceAgentStats(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentStats").Observe(time.Since(start).Seconds())
	return stats, err
}

func (m queryMetricsStore) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsAndLabelsRow, error) {
	start := time.Now()
	stats, err := m.s.GetWorkspaceAgentStatsAndLabels(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentStatsAndLabels").Observe(time.Since(start).Seconds())
	return stats, err
}

func (m queryMetricsStore) GetWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsRow, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentUsageStats(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentUsageStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsAndLabelsRow, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceAgentUsageStatsAndLabels(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStatsAndLabels").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgent, error) {
	start := time.Now()
	agents, err := m.s.GetWorkspaceAgentsByResourceIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentsByResourceIDs").Observe(time.Since(start).Seconds())
	return agents, err
}

func (m queryMetricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) {
	start := time.Now()
	agents, err := m.s.GetWorkspaceAgentsCreatedAfter(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentsCreatedAfter").Observe(time.Since(start).Seconds())
	return agents, err
}

func (m queryMetricsStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) {
	start := time.Now()
	agents, err := m.s.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspaceID)
	m.queryLatencies.WithLabelValues("GetWorkspaceAgentsInLatestBuildByWorkspaceID").Observe(time.Since(start).Seconds())
	return agents, err
}

func (m queryMetricsStore) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg database.GetWorkspaceAppByAgentIDAndSlugParams) (database.WorkspaceApp, error) {
	start := time.Now()
	app, err := m.s.GetWorkspaceAppByAgentIDAndSlug(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceAppByAgentIDAndSlug").Observe(time.Since(start).Seconds())
	return app, err
}

func (m queryMetricsStore) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]database.WorkspaceApp, error) {
	start := time.Now()
	apps, err := m.s.GetWorkspaceAppsByAgentID(ctx, agentID)
	m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentID").Observe(time.Since(start).Seconds())
	return apps, err
}

func (m queryMetricsStore) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceApp, error) {
	start := time.Now()
	apps, err := m.s.GetWorkspaceAppsByAgentIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentIDs").Observe(time.Since(start).Seconds())
	return apps, err
}

func (m queryMetricsStore) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceApp, error) {
	start := time.Now()
	apps, err := m.s.GetWorkspaceAppsCreatedAfter(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceAppsCreatedAfter").Observe(time.Since(start).Seconds())
	return apps, err
}

func (m queryMetricsStore) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (database.WorkspaceBuild, error) {
	start := time.Now()
	build, err := m.s.GetWorkspaceBuildByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildByID").Observe(time.Since(start).Seconds())
	return build, err
}

func (m queryMetricsStore) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UUID) (database.WorkspaceBuild, error) {
	start := time.Now()
	build, err := m.s.GetWorkspaceBuildByJobID(ctx, jobID)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildByJobID").Observe(time.Since(start).Seconds())
	return build, err
}

func (m queryMetricsStore) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Context, arg database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams) (database.WorkspaceBuild, error) {
	start := time.Now()
	build, err := m.s.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildByWorkspaceIDAndBuildNumber").Observe(time.Since(start).Seconds())
	return build, err
}

func (m queryMetricsStore) GetWorkspaceBuildParameters(ctx context.Context, workspaceBuildID uuid.UUID) ([]database.WorkspaceBuildParameter, error) {
	start := time.Now()
	params, err := m.s.GetWorkspaceBuildParameters(ctx, workspaceBuildID)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildParameters").Observe(time.Since(start).Seconds())
	return params, err
}

func (m queryMetricsStore) GetWorkspaceBuildStatsByTemplates(ctx context.Context, since time.Time) ([]database.GetWorkspaceBuildStatsByTemplatesRow, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceBuildStatsByTemplates(ctx, since)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildStatsByTemplates").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg database.GetWorkspaceBuildsByWorkspaceIDParams) ([]database.WorkspaceBuild, error) {
	start := time.Now()
	builds, err := m.s.GetWorkspaceBuildsByWorkspaceID(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildsByWorkspaceID").Observe(time.Since(start).Seconds())
	return builds, err
}

func (m queryMetricsStore) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceBuild, error) {
	start := time.Now()
	builds, err := m.s.GetWorkspaceBuildsCreatedAfter(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceBuildsCreatedAfter").Observe(time.Since(start).Seconds())
	return builds, err
}

func (m queryMetricsStore) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (database.Workspace, error) {
	start := time.Now()
	workspace, err := m.s.GetWorkspaceByAgentID(ctx, agentID)
	m.queryLatencies.WithLabelValues("GetWorkspaceByAgentID").Observe(time.Since(start).Seconds())
	return workspace, err
}

func (m queryMetricsStore) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (database.Workspace, error) {
	start := time.Now()
	workspace, err := m.s.GetWorkspaceByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetWorkspaceByID").Observe(time.Since(start).Seconds())
	return workspace, err
}

func (m queryMetricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database.GetWorkspaceByOwnerIDAndNameParams) (database.Workspace, error) {
	start := time.Now()
	workspace, err := m.s.GetWorkspaceByOwnerIDAndName(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceByOwnerIDAndName").Observe(time.Since(start).Seconds())
	return workspace, err
}

func (m queryMetricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) {
	start := time.Now()
	workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID)
	m.queryLatencies.WithLabelValues("GetWorkspaceByWorkspaceAppID").Observe(time.Since(start).Seconds())
	return workspace, err
}

func (m queryMetricsStore) GetWorkspaceProxies(ctx context.Context) ([]database.WorkspaceProxy, error) {
	start := time.Now()
	proxies, err := m.s.GetWorkspaceProxies(ctx)
	m.queryLatencies.WithLabelValues("GetWorkspaceProxies").Observe(time.Since(start).Seconds())
	return proxies, err
}

func (m queryMetricsStore) GetWorkspaceProxyByHostname(ctx context.Context, arg database.GetWorkspaceProxyByHostnameParams) (database.WorkspaceProxy, error) {
	start := time.Now()
	proxy, err := m.s.GetWorkspaceProxyByHostname(ctx, arg)
	m.queryLatencies.WithLabelValues("GetWorkspaceProxyByHostname").Observe(time.Since(start).Seconds())
	return proxy, err
}

func (m queryMetricsStore) GetWorkspaceProxyByID(ctx context.Context, id uuid.UUID) (database.WorkspaceProxy, error) {
	start := time.Now()
	proxy, err := m.s.GetWorkspaceProxyByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetWorkspaceProxyByID").Observe(time.Since(start).Seconds())
	return proxy, err
}

func (m queryMetricsStore) GetWorkspaceProxyByName(ctx context.Context, name string) (database.WorkspaceProxy, error) {
	start := time.Now()
	proxy, err := m.s.GetWorkspaceProxyByName(ctx, name)
	m.queryLatencies.WithLabelValues("GetWorkspaceProxyByName").Observe(time.Since(start).Seconds())
	return proxy, err
}

func (m queryMetricsStore) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (database.WorkspaceResource, error) {
	start := time.Now()
	resource, err := m.s.GetWorkspaceResourceByID(ctx, id)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourceByID").Observe(time.Since(start).Seconds())
	return resource, err
}

func (m queryMetricsStore) GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResourceMetadatum, error) {
	start := time.Now()
	metadata, err := m.s.GetWorkspaceResourceMetadataByResourceIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataByResourceIDs").Observe(time.Since(start).Seconds())
	return metadata, err
}

func (m queryMetricsStore) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResourceMetadatum, error) {
	start := time.Now()
	metadata, err := m.s.GetWorkspaceResourceMetadataCreatedAfter(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataCreatedAfter").Observe(time.Since(start).Seconds())
	return metadata, err
}

func (m queryMetricsStore) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]database.WorkspaceResource, error) {
	start := time.Now()
	resources, err := m.s.GetWorkspaceResourcesByJobID(ctx, jobID)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobID").Observe(time.Since(start).Seconds())
	return resources, err
}

func (m queryMetricsStore) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResource, error) {
	start := time.Now()
	resources, err := m.s.GetWorkspaceResourcesByJobIDs(ctx, ids)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobIDs").Observe(time.Since(start).Seconds())
	return resources, err
}

func (m queryMetricsStore) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResource, error) {
	start := time.Now()
	resources, err := m.s.GetWorkspaceResourcesCreatedAfter(ctx, createdAt)
	m.queryLatencies.WithLabelValues("GetWorkspaceResourcesCreatedAfter").Observe(time.Since(start).Seconds())
	return resources, err
}

func (m queryMetricsStore) GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx context.Context, templateIds []uuid.UUID) ([]database.GetWorkspaceUniqueOwnerCountByTemplateIDsRow, error) {
	start := time.Now()
	r0, r1 := m.s.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, templateIds)
m.queryLatencies.WithLabelValues("GetWorkspaceUniqueOwnerCountByTemplateIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaces(ctx context.Context, arg database.GetWorkspacesParams) ([]database.GetWorkspacesRow, error) { + start := time.Now() + workspaces, err := m.s.GetWorkspaces(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaces").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]database.WorkspaceTable, error) { + start := time.Now() + workspaces, err := m.s.GetWorkspacesEligibleForTransition(ctx, now) + m.queryLatencies.WithLabelValues("GetWorkspacesEligibleForAutoStartStop").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { + start := time.Now() + key, err := m.s.InsertAPIKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertAPIKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (database.Group, error) { + start := time.Now() + group, err := m.s.InsertAllUsersGroup(ctx, organizationID) + m.queryLatencies.WithLabelValues("InsertAllUsersGroup").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) InsertAuditLog(ctx context.Context, arg database.InsertAuditLogParams) (database.AuditLog, error) { + start := time.Now() + log, err := m.s.InsertAuditLog(ctx, arg) + m.queryLatencies.WithLabelValues("InsertAuditLog").Observe(time.Since(start).Seconds()) + return log, err +} + +func (m queryMetricsStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) { + start := time.Now() + key, err := m.s.InsertCryptoKey(ctx, arg) + 
	m.queryLatencies.WithLabelValues("InsertCryptoKey").Observe(time.Since(start).Seconds())
	return key, err
}

// The Insert* wrappers below all follow the file's uniform instrumentation
// pattern: record a start time, delegate to the wrapped store (m.s), observe
// the elapsed seconds on the query-latency histogram labeled with the query
// name, and return the underlying results unchanged.

func (m queryMetricsStore) InsertCustomRole(ctx context.Context, arg database.InsertCustomRoleParams) (database.CustomRole, error) {
	start := time.Now()
	r0, r1 := m.s.InsertCustomRole(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertCustomRole").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertDBCryptKey(ctx context.Context, arg database.InsertDBCryptKeyParams) error {
	start := time.Now()
	r0 := m.s.InsertDBCryptKey(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertDBCryptKey").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) InsertDERPMeshKey(ctx context.Context, value string) error {
	start := time.Now()
	err := m.s.InsertDERPMeshKey(ctx, value)
	m.queryLatencies.WithLabelValues("InsertDERPMeshKey").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertDeploymentID(ctx context.Context, value string) error {
	start := time.Now()
	err := m.s.InsertDeploymentID(ctx, value)
	m.queryLatencies.WithLabelValues("InsertDeploymentID").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertExternalAuthLink(ctx context.Context, arg database.InsertExternalAuthLinkParams) (database.ExternalAuthLink, error) {
	start := time.Now()
	link, err := m.s.InsertExternalAuthLink(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertExternalAuthLink").Observe(time.Since(start).Seconds())
	return link, err
}

func (m queryMetricsStore) InsertFile(ctx context.Context, arg database.InsertFileParams) (database.File, error) {
	start := time.Now()
	file, err := m.s.InsertFile(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertFile").Observe(time.Since(start).Seconds())
	return file, err
}

func (m queryMetricsStore) InsertGitSSHKey(ctx context.Context, arg database.InsertGitSSHKeyParams) (database.GitSSHKey, error) {
	start := time.Now()
	key, err := m.s.InsertGitSSHKey(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertGitSSHKey").Observe(time.Since(start).Seconds())
	return key, err
}

func (m queryMetricsStore) InsertGroup(ctx context.Context, arg database.InsertGroupParams) (database.Group, error) {
	start := time.Now()
	group, err := m.s.InsertGroup(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertGroup").Observe(time.Since(start).Seconds())
	return group, err
}

func (m queryMetricsStore) InsertGroupMember(ctx context.Context, arg database.InsertGroupMemberParams) error {
	start := time.Now()
	err := m.s.InsertGroupMember(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertGroupMember").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) {
	start := time.Now()
	license, err := m.s.InsertLicense(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertLicense").Observe(time.Since(start).Seconds())
	return license, err
}

func (m queryMetricsStore) InsertMissingGroups(ctx context.Context, arg database.InsertMissingGroupsParams) ([]database.Group, error) {
	start := time.Now()
	r0, r1 := m.s.InsertMissingGroups(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertMissingGroups").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) {
	start := time.Now()
	r0, r1 := m.s.InsertOAuth2ProviderApp(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOAuth2ProviderApp").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) {
	start := time.Now()
	r0, r1 := m.s.InsertOAuth2ProviderAppCode(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppCode").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) {
	start := time.Now()
	r0, r1 := m.s.InsertOAuth2ProviderAppSecret(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppSecret").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertOAuth2ProviderAppToken(ctx context.Context, arg database.InsertOAuth2ProviderAppTokenParams) (database.OAuth2ProviderAppToken, error) {
	start := time.Now()
	r0, r1 := m.s.InsertOAuth2ProviderAppToken(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppToken").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertOrganization(ctx context.Context, arg database.InsertOrganizationParams) (database.Organization, error) {
	start := time.Now()
	organization, err := m.s.InsertOrganization(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOrganization").Observe(time.Since(start).Seconds())
	return organization, err
}

func (m queryMetricsStore) InsertOrganizationMember(ctx context.Context, arg database.InsertOrganizationMemberParams) (database.OrganizationMember, error) {
	start := time.Now()
	member, err := m.s.InsertOrganizationMember(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertOrganizationMember").Observe(time.Since(start).Seconds())
	return member, err
}

func (m queryMetricsStore) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) {
	start := time.Now()
	job, err := m.s.InsertProvisionerJob(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertProvisionerJob").Observe(time.Since(start).Seconds())
	return job, err
}

func (m queryMetricsStore) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) {
	start := time.Now()
	logs, err := m.s.InsertProvisionerJobLogs(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertProvisionerJobLogs").Observe(time.Since(start).Seconds())
	return logs, err
}

func (m queryMetricsStore) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) {
	start := time.Now()
	r0, r1 := m.s.InsertProvisionerJobTimings(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertProvisionerJobTimings").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertProvisionerKey(ctx context.Context, arg database.InsertProvisionerKeyParams) (database.ProvisionerKey, error) {
	start := time.Now()
	r0, r1 := m.s.InsertProvisionerKey(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertProvisionerKey").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) {
	start := time.Now()
	replica, err := m.s.InsertReplica(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertReplica").Observe(time.Since(start).Seconds())
	return replica, err
}

func (m queryMetricsStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error {
	start := time.Now()
	err := m.s.InsertTemplate(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertTemplate").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertTemplateVersion(ctx context.Context, arg database.InsertTemplateVersionParams) error {
	start := time.Now()
	err := m.s.InsertTemplateVersion(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertTemplateVersion").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertTemplateVersionParameter(ctx context.Context, arg database.InsertTemplateVersionParameterParams) (database.TemplateVersionParameter, error) {
	start := time.Now()
	parameter, err := m.s.InsertTemplateVersionParameter(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertTemplateVersionParameter").Observe(time.Since(start).Seconds())
	return parameter, err
}

func (m queryMetricsStore) InsertTemplateVersionVariable(ctx context.Context, arg database.InsertTemplateVersionVariableParams) (database.TemplateVersionVariable, error) {
	start := time.Now()
	variable, err := m.s.InsertTemplateVersionVariable(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertTemplateVersionVariable").Observe(time.Since(start).Seconds())
	return variable, err
}

func (m queryMetricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) {
	start := time.Now()
	r0, r1 := m.s.InsertTemplateVersionWorkspaceTag(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertTemplateVersionWorkspaceTag").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) {
	start := time.Now()
	user, err := m.s.InsertUser(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertUser").Observe(time.Since(start).Seconds())
	return user, err
}

func (m queryMetricsStore) InsertUserGroupsByID(ctx context.Context, arg database.InsertUserGroupsByIDParams) ([]uuid.UUID, error) {
	start := time.Now()
	r0, r1 := m.s.InsertUserGroupsByID(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertUserGroupsByID").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertUserGroupsByName(ctx context.Context, arg database.InsertUserGroupsByNameParams) error {
	start := time.Now()
	err := m.s.InsertUserGroupsByName(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertUserGroupsByName").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertUserLink(ctx context.Context, arg database.InsertUserLinkParams) (database.UserLink, error) {
	start := time.Now()
	link, err := m.s.InsertUserLink(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertUserLink").Observe(time.Since(start).Seconds())
	return link, err
}

func (m queryMetricsStore) InsertWorkspace(ctx context.Context, arg database.InsertWorkspaceParams) (database.WorkspaceTable, error) {
	start := time.Now()
	workspace, err := m.s.InsertWorkspace(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspace").Observe(time.Since(start).Seconds())
	return workspace, err
}

func (m queryMetricsStore) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) {
	start := time.Now()
	agent, err := m.s.InsertWorkspaceAgent(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgent").Observe(time.Since(start).Seconds())
	return agent, err
}

func (m queryMetricsStore) InsertWorkspaceAgentLogSources(ctx context.Context, arg database.InsertWorkspaceAgentLogSourcesParams) ([]database.WorkspaceAgentLogSource, error) {
	start := time.Now()
	r0, r1 := m.s.InsertWorkspaceAgentLogSources(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogSources").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertWorkspaceAgentLogs(ctx context.Context, arg database.InsertWorkspaceAgentLogsParams) ([]database.WorkspaceAgentLog, error) {
	start := time.Now()
	r0, r1 := m.s.InsertWorkspaceAgentLogs(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogs").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertWorkspaceAgentMetadata(ctx context.Context, arg database.InsertWorkspaceAgentMetadataParams) error {
	start := time.Now()
	err := m.s.InsertWorkspaceAgentMetadata(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentMetadata").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertWorkspaceAgentScriptTimings(ctx context.Context, arg database.InsertWorkspaceAgentScriptTimingsParams) (database.WorkspaceAgentScriptTiming, error) {
	start := time.Now()
	r0, r1 := m.s.InsertWorkspaceAgentScriptTimings(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScriptTimings").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertWorkspaceAgentScripts(ctx context.Context, arg database.InsertWorkspaceAgentScriptsParams) ([]database.WorkspaceAgentScript, error) {
	start := time.Now()
	r0, r1 := m.s.InsertWorkspaceAgentScripts(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScripts").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) InsertWorkspaceAgentStats(ctx context.Context, arg database.InsertWorkspaceAgentStatsParams) error {
	start := time.Now()
	r0 := m.s.InsertWorkspaceAgentStats(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAgentStats").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) InsertWorkspaceApp(ctx context.Context, arg database.InsertWorkspaceAppParams) (database.WorkspaceApp, error) {
	start := time.Now()
	app, err := m.s.InsertWorkspaceApp(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceApp").Observe(time.Since(start).Seconds())
	return app, err
}

func (m queryMetricsStore) InsertWorkspaceAppStats(ctx context.Context, arg database.InsertWorkspaceAppStatsParams) error {
	start := time.Now()
	r0 := m.s.InsertWorkspaceAppStats(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceAppStats").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) InsertWorkspaceBuild(ctx context.Context, arg database.InsertWorkspaceBuildParams) error {
	start := time.Now()
	err := m.s.InsertWorkspaceBuild(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceBuild").Observe(time.Since(start).Seconds())
	return err
}

// The wrappers below continue the file's uniform instrumentation pattern:
// record a start time, delegate to the wrapped store (m.s), observe the
// elapsed seconds on the query-latency histogram labeled with the query
// name, and return the underlying results unchanged.

func (m queryMetricsStore) InsertWorkspaceBuildParameters(ctx context.Context, arg database.InsertWorkspaceBuildParametersParams) error {
	start := time.Now()
	err := m.s.InsertWorkspaceBuildParameters(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceBuildParameters").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) InsertWorkspaceProxy(ctx context.Context, arg database.InsertWorkspaceProxyParams) (database.WorkspaceProxy, error) {
	start := time.Now()
	proxy, err := m.s.InsertWorkspaceProxy(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceProxy").Observe(time.Since(start).Seconds())
	return proxy, err
}

func (m queryMetricsStore) InsertWorkspaceResource(ctx context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) {
	start := time.Now()
	resource, err := m.s.InsertWorkspaceResource(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceResource").Observe(time.Since(start).Seconds())
	return resource, err
}

func (m queryMetricsStore) InsertWorkspaceResourceMetadata(ctx context.Context, arg database.InsertWorkspaceResourceMetadataParams) ([]database.WorkspaceResourceMetadatum, error) {
	start := time.Now()
	metadata, err := m.s.InsertWorkspaceResourceMetadata(ctx, arg)
	m.queryLatencies.WithLabelValues("InsertWorkspaceResourceMetadata").Observe(time.Since(start).Seconds())
	return metadata, err
}

func (m queryMetricsStore) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) {
	start := time.Now()
	r0, r1 := m.s.ListProvisionerKeysByOrganization(ctx, organizationID)
	m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganization").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) {
	start := time.Now()
	r0, r1 := m.s.ListProvisionerKeysByOrganizationExcludeReserved(ctx, organizationID)
	m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganizationExcludeReserved").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) {
	start := time.Now()
	r0, r1 := m.s.ListWorkspaceAgentPortShares(ctx, workspaceID)
	m.queryLatencies.WithLabelValues("ListWorkspaceAgentPortShares").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) OrganizationMembers(ctx context.Context, arg database.OrganizationMembersParams) ([]database.OrganizationMembersRow, error) {
	start := time.Now()
	r0, r1 := m.s.OrganizationMembers(ctx, arg)
	m.queryLatencies.WithLabelValues("OrganizationMembers").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx context.Context, templateID uuid.UUID) error {
	start := time.Now()
	r0 := m.s.ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx, templateID)
	m.queryLatencies.WithLabelValues("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) RegisterWorkspaceProxy(ctx context.Context, arg database.RegisterWorkspaceProxyParams) (database.WorkspaceProxy, error) {
	start := time.Now()
	proxy, err := m.s.RegisterWorkspaceProxy(ctx, arg)
	m.queryLatencies.WithLabelValues("RegisterWorkspaceProxy").Observe(time.Since(start).Seconds())
	return proxy, err
}

func (m queryMetricsStore) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error {
	start := time.Now()
	r0 := m.s.RemoveUserFromAllGroups(ctx, userID)
	m.queryLatencies.WithLabelValues("RemoveUserFromAllGroups").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) RemoveUserFromGroups(ctx context.Context, arg database.RemoveUserFromGroupsParams) ([]uuid.UUID, error) {
	start := time.Now()
	r0, r1 := m.s.RemoveUserFromGroups(ctx, arg)
	m.queryLatencies.WithLabelValues("RemoveUserFromGroups").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error {
	start := time.Now()
	r0 := m.s.RevokeDBCryptKey(ctx, activeKeyDigest)
	m.queryLatencies.WithLabelValues("RevokeDBCryptKey").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) {
	start := time.Now()
	ok, err := m.s.TryAcquireLock(ctx, pgTryAdvisoryXactLock)
	m.queryLatencies.WithLabelValues("TryAcquireLock").Observe(time.Since(start).Seconds())
	return ok, err
}

func (m queryMetricsStore) UnarchiveTemplateVersion(ctx context.Context, arg database.UnarchiveTemplateVersionParams) error {
	start := time.Now()
	r0 := m.s.UnarchiveTemplateVersion(ctx, arg)
	m.queryLatencies.WithLabelValues("UnarchiveTemplateVersion").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) UnfavoriteWorkspace(ctx context.Context, arg uuid.UUID) error {
	start := time.Now()
	r0 := m.s.UnfavoriteWorkspace(ctx, arg)
	m.queryLatencies.WithLabelValues("UnfavoriteWorkspace").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error {
	start := time.Now()
	err := m.s.UpdateAPIKeyByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateAPIKeyByID").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) {
	start := time.Now()
	key, err := m.s.UpdateCryptoKeyDeletesAt(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateCryptoKeyDeletesAt").Observe(time.Since(start).Seconds())
	return key, err
}

func (m queryMetricsStore) UpdateCustomRole(ctx context.Context, arg database.UpdateCustomRoleParams) (database.CustomRole, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateCustomRole(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateCustomRole").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateExternalAuthLink(ctx context.Context, arg database.UpdateExternalAuthLinkParams) (database.ExternalAuthLink, error) {
	start := time.Now()
	link, err := m.s.UpdateExternalAuthLink(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateExternalAuthLink").Observe(time.Since(start).Seconds())
	return link, err
}

func (m queryMetricsStore) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
	start := time.Now()
	key, err := m.s.UpdateGitSSHKey(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateGitSSHKey").Observe(time.Since(start).Seconds())
	return key, err
}

func (m queryMetricsStore) UpdateGroupByID(ctx context.Context, arg database.UpdateGroupByIDParams) (database.Group, error) {
	start := time.Now()
	group, err := m.s.UpdateGroupByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateGroupByID").Observe(time.Since(start).Seconds())
	return group, err
}

func (m queryMetricsStore) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfter database.UpdateInactiveUsersToDormantParams) ([]database.UpdateInactiveUsersToDormantRow, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateInactiveUsersToDormant(ctx, lastSeenAfter)
	m.queryLatencies.WithLabelValues("UpdateInactiveUsersToDormant").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
	start := time.Now()
	member, err := m.s.UpdateMemberRoles(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateMemberRoles").Observe(time.Since(start).Seconds())
	return member, err
}

func (m queryMetricsStore) UpdateNotificationTemplateMethodByID(ctx context.Context, arg database.UpdateNotificationTemplateMethodByIDParams) (database.NotificationTemplate, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateNotificationTemplateMethodByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateNotificationTemplateMethodByID").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateOAuth2ProviderAppByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppByID").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateOAuth2ProviderAppSecretByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) {
	start := time.Now()
	r0, r1 := m.s.UpdateOrganization(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateOrganization").Observe(time.Since(start).Seconds())
	return r0, r1
}

func (m queryMetricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error {
	start := time.Now()
	r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateProvisionerDaemonLastSeenAt").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error {
	start := time.Now()
	err := m.s.UpdateProvisionerJobByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateProvisionerJobByID").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error {
	start := time.Now()
	err := m.s.UpdateProvisionerJobWithCancelByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCancelByID").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error {
	start := time.Now()
	err := m.s.UpdateProvisionerJobWithCompleteByID(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteByID").Observe(time.Since(start).Seconds())
	return err
}

func (m queryMetricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) {
	start := time.Now()
	replica, err := m.s.UpdateReplica(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateReplica").Observe(time.Since(start).Seconds())
	return replica, err
}

func (m queryMetricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg database.UpdateTailnetPeerStatusByCoordinatorParams) error {
	start := time.Now()
	r0 := m.s.UpdateTailnetPeerStatusByCoordinator(ctx, arg)
	m.queryLatencies.WithLabelValues("UpdateTailnetPeerStatusByCoordinator").Observe(time.Since(start).Seconds())
	return r0
}

func (m queryMetricsStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error {
	start := time.Now()
	err :=
m.s.UpdateTemplateACLByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateACLByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateAccessControlByID(ctx context.Context, arg database.UpdateTemplateAccessControlByIDParams) error { + start := time.Now() + r0 := m.s.UpdateTemplateAccessControlByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateAccessControlByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateTemplateActiveVersionByID(ctx context.Context, arg database.UpdateTemplateActiveVersionByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateActiveVersionByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateActiveVersionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateDeletedByID(ctx context.Context, arg database.UpdateTemplateDeletedByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateDeletedByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateDeletedByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateMetaByID(ctx context.Context, arg database.UpdateTemplateMetaByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateMetaByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateMetaByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateScheduleByID(ctx context.Context, arg database.UpdateTemplateScheduleByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateScheduleByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateScheduleByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionByID(ctx, arg) + 
m.queryLatencies.WithLabelValues("UpdateTemplateVersionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg database.UpdateTemplateVersionDescriptionByJobIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionDescriptionByJobID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionDescriptionByJobID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionExternalAuthProvidersByJobID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { + start := time.Now() + r0 := m.s.UpdateTemplateWorkspacesLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateWorkspacesLastUsedAt").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserAppearanceSettings(ctx context.Context, arg database.UpdateUserAppearanceSettingsParams) (database.User, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserAppearanceSettings(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserAppearanceSettings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.UpdateUserDeletedByID(ctx, id) + m.queryLatencies.WithLabelValues("UpdateUserDeletedByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserGithubComUserID(ctx context.Context, arg 
database.UpdateUserGithubComUserIDParams) error { + start := time.Now() + r0 := m.s.UpdateUserGithubComUserID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserGithubComUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserHashedOneTimePasscode(ctx context.Context, arg database.UpdateUserHashedOneTimePasscodeParams) error { + start := time.Now() + r0 := m.s.UpdateUserHashedOneTimePasscode(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserHashedOneTimePasscode").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserHashedPassword(ctx context.Context, arg database.UpdateUserHashedPasswordParams) error { + start := time.Now() + err := m.s.UpdateUserHashedPassword(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserHashedPassword").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateUserLastSeenAt(ctx context.Context, arg database.UpdateUserLastSeenAtParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserLastSeenAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLastSeenAt").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserLink(ctx context.Context, arg database.UpdateUserLinkParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.UpdateUserLink(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.UpdateUserLinkedID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLinkedID").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) UpdateUserLoginType(ctx context.Context, arg database.UpdateUserLoginTypeParams) (database.User, error) { + start := 
time.Now() + r0, r1 := m.s.UpdateUserLoginType(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLoginType").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserNotificationPreferences(ctx context.Context, arg database.UpdateUserNotificationPreferencesParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserNotificationPreferences(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserNotificationPreferences").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserProfile(ctx context.Context, arg database.UpdateUserProfileParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserProfile(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserProfile").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserQuietHoursSchedule(ctx context.Context, arg database.UpdateUserQuietHoursScheduleParams) (database.User, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserQuietHoursSchedule(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserQuietHoursSchedule").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRolesParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserRoles(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserRoles").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserStatus(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserStatus").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateWorkspace(ctx context.Context, arg database.UpdateWorkspaceParams) (database.WorkspaceTable, error) { + start := time.Now() + workspace, 
err := m.s.UpdateWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspace").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentConnectionByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentConnectionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg database.UpdateWorkspaceAgentLifecycleStateByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAgentLifecycleStateByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg database.UpdateWorkspaceAgentLogOverflowByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAgentLogOverflowByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLogOverflowByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceAgentMetadata(ctx context.Context, arg database.UpdateWorkspaceAgentMetadataParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg database.UpdateWorkspaceAgentStartupByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentStartupByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentStartupByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAppHealthByID(ctx context.Context, arg 
database.UpdateWorkspaceAppHealthByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAppHealthByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAppHealthByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg database.UpdateWorkspaceAutomaticUpdatesParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAutomaticUpdates(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAutomaticUpdates").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceAutostart(ctx context.Context, arg database.UpdateWorkspaceAutostartParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAutostart(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAutostart").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceBuildCostByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildCostByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceBuildDeadlineByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildDeadlineByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildProvisionerStateByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) 
UpdateWorkspaceDeletedByID(ctx context.Context, arg database.UpdateWorkspaceDeletedByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceDeletedByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceDeletedByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg database.UpdateWorkspaceDormantDeletingAtParams) (database.WorkspaceTable, error) { + start := time.Now() + ws, r0 := m.s.UpdateWorkspaceDormantDeletingAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceDormantDeletingAt").Observe(time.Since(start).Seconds()) + return ws, r0 +} + +func (m queryMetricsStore) UpdateWorkspaceLastUsedAt(ctx context.Context, arg database.UpdateWorkspaceLastUsedAtParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceProxy(ctx context.Context, arg database.UpdateWorkspaceProxyParams) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.UpdateWorkspaceProxy(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceProxy").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) UpdateWorkspaceProxyDeleted(ctx context.Context, arg database.UpdateWorkspaceProxyDeletedParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceProxyDeleted(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceProxyDeleted").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceTTL(ctx context.Context, arg database.UpdateWorkspaceTTLParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceTTL(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceTTL").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) 
UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams) ([]database.WorkspaceTable, error) { + start := time.Now() + r0, r1 := m.s.UpdateWorkspacesDormantDeletingAtByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspacesDormantDeletingAtByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertAnnouncementBanners(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertAnnouncementBanners(ctx, value) + m.queryLatencies.WithLabelValues("UpsertAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertAppSecurityKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertAppSecurityKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertAppSecurityKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertApplicationName(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertApplicationName(ctx, value) + m.queryLatencies.WithLabelValues("UpsertApplicationName").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertCoordinatorResumeTokenSigningKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { + start := time.Now() + r0 := m.s.UpsertDefaultProxy(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertDefaultProxy").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertHealthSettings(ctx context.Context, value string) error { + start := time.Now() + r0 := 
m.s.UpsertHealthSettings(ctx, value) + m.queryLatencies.WithLabelValues("UpsertHealthSettings").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error { + start := time.Now() + r0 := m.s.UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertLastUpdateCheck(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertLastUpdateCheck(ctx, value) + m.queryLatencies.WithLabelValues("UpsertLastUpdateCheck").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertLogoURL(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertLogoURL(ctx, value) + m.queryLatencies.WithLabelValues("UpsertLogoURL").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error { + start := time.Now() + r0 := m.s.UpsertNotificationReportGeneratorLog(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertNotificationReportGeneratorLog").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertNotificationsSettings(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertNotificationsSettings(ctx, value) + m.queryLatencies.WithLabelValues("UpsertNotificationsSettings").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertOAuthSigningKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertOAuthSigningKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m 
queryMetricsStore) UpsertProvisionerDaemon(ctx context.Context, arg database.UpsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) { + start := time.Now() + r0, r1 := m.s.UpsertProvisionerDaemon(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertProvisionerDaemon").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertRuntimeConfig(ctx context.Context, arg database.UpsertRuntimeConfigParams) error { + start := time.Now() + r0 := m.s.UpsertRuntimeConfig(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetAgent(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetClient(ctx context.Context, arg database.UpsertTailnetClientParams) (database.TailnetClient, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetClient(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetClient").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetClientSubscription(ctx context.Context, arg database.UpsertTailnetClientSubscriptionParams) error { + start := time.Now() + r0 := m.s.UpsertTailnetClientSubscription(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetClientSubscription").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (database.TailnetCoordinator, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetCoordinator(ctx, id) + m.queryLatencies.WithLabelValues("UpsertTailnetCoordinator").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetPeer(ctx 
context.Context, arg database.UpsertTailnetPeerParams) (database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetPeer(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetPeer").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTailnetTunnelParams) (database.TailnetTunnel, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetTunnel(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetTunnel").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTemplateUsageStats(ctx context.Context) error { + start := time.Now() + r0 := m.s.UpsertTemplateUsageStats(ctx) + m.queryLatencies.WithLabelValues("UpsertTemplateUsageStats").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertWorkspaceAgentPortShare(ctx context.Context, arg database.UpsertWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { + start := time.Now() + r0, r1 := m.s.UpsertWorkspaceAgentPortShare(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, prepared rbac.PreparedAuthorized) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetAuthorizedTemplates(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedTemplates").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetTemplateGroupRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateGroup, error) { + start := time.Now() + roles, err := m.s.GetTemplateGroupRoles(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateGroupRoles").Observe(time.Since(start).Seconds()) + return roles, err +} + +func (m queryMetricsStore) GetTemplateUserRoles(ctx 
context.Context, id uuid.UUID) ([]database.TemplateUser, error) { + start := time.Now() + roles, err := m.s.GetTemplateUserRoles(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateUserRoles").Observe(time.Since(start).Seconds()) + return roles, err +} + +func (m queryMetricsStore) GetAuthorizedWorkspaces(ctx context.Context, arg database.GetWorkspacesParams, prepared rbac.PreparedAuthorized) ([]database.GetWorkspacesRow, error) { + start := time.Now() + workspaces, err := m.s.GetAuthorizedWorkspaces(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedWorkspaces").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) GetAuthorizedUsers(ctx context.Context, arg database.GetUsersParams, prepared rbac.PreparedAuthorized) ([]database.GetUsersRow, error) { + start := time.Now() + r0, r1 := m.s.GetAuthorizedUsers(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedUsers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuthorizedAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams, prepared rbac.PreparedAuthorized) ([]database.GetAuditLogsOffsetRow, error) { + start := time.Now() + r0, r1 := m.s.GetAuthorizedAuditLogsOffset(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedAuditLogsOffset").Observe(time.Since(start).Seconds()) + return r0, r1 +} diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index b3c7b9e7615d3..ffc9ab79f777e 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -11,7 +11,6 @@ package dbmock import ( context "context" - sql "database/sql" reflect "reflect" time "time" @@ -3489,7 +3488,7 @@ func (mr *MockStoreMockRecorder) GetWorkspacesEligibleForTransition(arg0, arg1 a } // InTx mocks base method. 
-func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *sql.TxOptions) error { +func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *database.TxOptions) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "InTx", arg0, arg1) ret0, _ := ret[0].(error) diff --git a/coderd/database/dbpurge/dbpurge.go b/coderd/database/dbpurge/dbpurge.go index 00244cfd63533..e9c22611f1879 100644 --- a/coderd/database/dbpurge/dbpurge.go +++ b/coderd/database/dbpurge/dbpurge.go @@ -66,7 +66,7 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. logger.Info(ctx, "purged old database entries", slog.F("duration", clk.Since(start))) return nil - }, nil); err != nil { + }, database.DefaultTXOptions().WithID("db_purge")); err != nil { logger.Error(ctx, "failed to purge old database entries", slog.Error(err)) return } diff --git a/coderd/database/dbrollup/dbrollup.go b/coderd/database/dbrollup/dbrollup.go index 36eddc41fc544..c6b61c587580e 100644 --- a/coderd/database/dbrollup/dbrollup.go +++ b/coderd/database/dbrollup/dbrollup.go @@ -108,7 +108,7 @@ func (r *Rolluper) start(ctx context.Context) { ev.TemplateUsageStats = true return tx.UpsertTemplateUsageStats(ctx) - }, nil) + }, database.DefaultTXOptions().WithID("db_rollup")) }) err := eg.Wait() diff --git a/coderd/database/dbrollup/dbrollup_test.go b/coderd/database/dbrollup/dbrollup_test.go index 0c32ddc9a9c9a..6d541dd66969b 100644 --- a/coderd/database/dbrollup/dbrollup_test.go +++ b/coderd/database/dbrollup/dbrollup_test.go @@ -38,7 +38,7 @@ type wrapUpsertDB struct { resume <-chan struct{} } -func (w *wrapUpsertDB) InTx(fn func(database.Store) error, opts *sql.TxOptions) error { +func (w *wrapUpsertDB) InTx(fn func(database.Store) error, opts *database.TxOptions) error { return w.Store.InTx(func(tx database.Store) error { return fn(&wrapUpsertDB{Store: tx, resume: w.resume}) }, opts) diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 3a9a5a7a2d8f6..e4e119423ea78 100644 
--- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -38,7 +38,8 @@ CREATE TYPE build_reason AS ENUM ( ); CREATE TYPE crypto_key_feature AS ENUM ( - 'workspace_apps', + 'workspace_apps_token', + 'workspace_apps_api_key', 'oidc_convert', 'tailnet_resume' ); @@ -667,7 +668,6 @@ CREATE TABLE users ( github_com_user_id bigint, hashed_one_time_passcode bytea, one_time_passcode_expires_at timestamp with time zone, - must_reset_password boolean DEFAULT false NOT NULL, CONSTRAINT one_time_passcode_set CHECK ((((hashed_one_time_passcode IS NULL) AND (one_time_passcode_expires_at IS NULL)) OR ((hashed_one_time_passcode IS NOT NULL) AND (one_time_passcode_expires_at IS NOT NULL)))) ); @@ -683,8 +683,6 @@ COMMENT ON COLUMN users.hashed_one_time_passcode IS 'A hash of the one-time-pass COMMENT ON COLUMN users.one_time_passcode_expires_at IS 'The time when the one-time-passcode expires.'; -COMMENT ON COLUMN users.must_reset_password IS 'Determines if the user should be forced to change their password.'; - CREATE VIEW group_members_expanded AS WITH all_members AS ( SELECT group_members.user_id, diff --git a/coderd/database/migrations/000271_cryptokey_features.down.sql b/coderd/database/migrations/000271_cryptokey_features.down.sql new file mode 100644 index 0000000000000..7cdd00d222da8 --- /dev/null +++ b/coderd/database/migrations/000271_cryptokey_features.down.sql @@ -0,0 +1,18 @@ +-- Step 1: Remove the new entries from crypto_keys table +DELETE FROM crypto_keys +WHERE feature IN ('workspace_apps_token', 'workspace_apps_api_key'); + +CREATE TYPE old_crypto_key_feature AS ENUM ( + 'workspace_apps', + 'oidc_convert', + 'tailnet_resume' +); + +ALTER TABLE crypto_keys + ALTER COLUMN feature TYPE old_crypto_key_feature + USING (feature::text::old_crypto_key_feature); + +DROP TYPE crypto_key_feature; + +ALTER TYPE old_crypto_key_feature RENAME TO crypto_key_feature; + diff --git a/coderd/database/migrations/000271_cryptokey_features.up.sql 
b/coderd/database/migrations/000271_cryptokey_features.up.sql new file mode 100644 index 0000000000000..bca75d220d0c7 --- /dev/null +++ b/coderd/database/migrations/000271_cryptokey_features.up.sql @@ -0,0 +1,18 @@ +-- Create a new enum type with the desired values +CREATE TYPE new_crypto_key_feature AS ENUM ( + 'workspace_apps_token', + 'workspace_apps_api_key', + 'oidc_convert', + 'tailnet_resume' +); + +DELETE FROM crypto_keys WHERE feature = 'workspace_apps'; + +-- Drop the old type and rename the new one +ALTER TABLE crypto_keys + ALTER COLUMN feature TYPE new_crypto_key_feature + USING (feature::text::new_crypto_key_feature); + +DROP TYPE crypto_key_feature; + +ALTER TYPE new_crypto_key_feature RENAME TO crypto_key_feature; diff --git a/coderd/database/migrations/000272_remove_must_reset_password.down.sql b/coderd/database/migrations/000272_remove_must_reset_password.down.sql new file mode 100644 index 0000000000000..9f798fc1898ca --- /dev/null +++ b/coderd/database/migrations/000272_remove_must_reset_password.down.sql @@ -0,0 +1 @@ +ALTER TABLE users ADD COLUMN must_reset_password bool NOT NULL DEFAULT false; diff --git a/coderd/database/migrations/000272_remove_must_reset_password.up.sql b/coderd/database/migrations/000272_remove_must_reset_password.up.sql new file mode 100644 index 0000000000000..d93e464493cc4 --- /dev/null +++ b/coderd/database/migrations/000272_remove_must_reset_password.up.sql @@ -0,0 +1 @@ +ALTER TABLE users DROP COLUMN must_reset_password; diff --git a/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql b/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql new file mode 100644 index 0000000000000..5cb2cd4c95509 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql @@ -0,0 +1,40 @@ +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'workspace_apps_token', + 1, + 'abc', + NULL, + '1970-01-01 
00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'workspace_apps_api_key', + 1, + 'def', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'oidc_convert', + 2, + 'ghi', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'tailnet_resume', + 2, + 'jkl', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go index 9888027e01559..9cab04d8e5c2e 100644 --- a/coderd/database/modelqueries.go +++ b/coderd/database/modelqueries.go @@ -374,7 +374,6 @@ func (q *sqlQuerier) GetAuthorizedUsers(ctx context.Context, arg GetUsersParams, &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, &i.Count, ); err != nil { return nil, err diff --git a/coderd/database/models.go b/coderd/database/models.go index 1207587d46529..680450a7826d0 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -345,9 +345,10 @@ func AllBuildReasonValues() []BuildReason { type CryptoKeyFeature string const ( - CryptoKeyFeatureWorkspaceApps CryptoKeyFeature = "workspace_apps" - CryptoKeyFeatureOidcConvert CryptoKeyFeature = "oidc_convert" - CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" + CryptoKeyFeatureWorkspaceAppsToken CryptoKeyFeature = "workspace_apps_token" + CryptoKeyFeatureWorkspaceAppsAPIKey CryptoKeyFeature = "workspace_apps_api_key" + CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" + CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" ) func (e 
*CryptoKeyFeature) Scan(src interface{}) error { @@ -387,8 +388,9 @@ func (ns NullCryptoKeyFeature) Value() (driver.Value, error) { func (e CryptoKeyFeature) Valid() bool { switch e { - case CryptoKeyFeatureWorkspaceApps, - CryptoKeyFeatureOidcConvert, + case CryptoKeyFeatureWorkspaceAppsToken, + CryptoKeyFeatureWorkspaceAppsAPIKey, + CryptoKeyFeatureOIDCConvert, CryptoKeyFeatureTailnetResume: return true } @@ -397,8 +399,9 @@ func (e CryptoKeyFeature) Valid() bool { func AllCryptoKeyFeatureValues() []CryptoKeyFeature { return []CryptoKeyFeature{ - CryptoKeyFeatureWorkspaceApps, - CryptoKeyFeatureOidcConvert, + CryptoKeyFeatureWorkspaceAppsToken, + CryptoKeyFeatureWorkspaceAppsAPIKey, + CryptoKeyFeatureOIDCConvert, CryptoKeyFeatureTailnetResume, } } @@ -2876,8 +2879,6 @@ type User struct { HashedOneTimePasscode []byte `db:"hashed_one_time_passcode" json:"hashed_one_time_passcode"` // The time when the one-time-passcode expires. OneTimePasscodeExpiresAt sql.NullTime `db:"one_time_passcode_expires_at" json:"one_time_passcode_expires_at"` - // Determines if the user should be forced to change their password. - MustResetPassword bool `db:"must_reset_password" json:"must_reset_password"` } type UserLink struct { diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 45cbef3f5e1d8..d00c4ec3bcdef 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -10031,7 +10031,7 @@ func (q *sqlQuerier) GetAuthorizationUserRoles(ctx context.Context, userID uuid. 
const getUserByEmailOrUsername = `-- name: GetUserByEmailOrUsername :one SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE @@ -10068,14 +10068,13 @@ func (q *sqlQuerier) GetUserByEmailOrUsername(ctx context.Context, arg GetUserBy &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } const getUserByID = `-- name: GetUserByID :one SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE @@ -10106,7 +10105,6 @@ func (q *sqlQuerier) GetUserByID(ctx context.Context, id uuid.UUID) (User, error &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10129,7 +10127,7 @@ func (q *sqlQuerier) GetUserCount(ctx context.Context) (int64, error) { const getUsers = `-- name: GetUsers :many SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, 
github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password, COUNT(*) OVER() AS count + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, COUNT(*) OVER() AS count FROM users WHERE @@ -10231,7 +10229,6 @@ type GetUsersRow struct { GithubComUserID sql.NullInt64 `db:"github_com_user_id" json:"github_com_user_id"` HashedOneTimePasscode []byte `db:"hashed_one_time_passcode" json:"hashed_one_time_passcode"` OneTimePasscodeExpiresAt sql.NullTime `db:"one_time_passcode_expires_at" json:"one_time_passcode_expires_at"` - MustResetPassword bool `db:"must_reset_password" json:"must_reset_password"` Count int64 `db:"count" json:"count"` } @@ -10273,7 +10270,6 @@ func (q *sqlQuerier) GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUse &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, &i.Count, ); err != nil { return nil, err @@ -10290,7 +10286,7 @@ func (q *sqlQuerier) GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUse } const getUsersByIDs = `-- name: GetUsersByIDs :many -SELECT id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password FROM users WHERE id = ANY($1 :: uuid [ ]) +SELECT id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE id = ANY($1 :: uuid [ ]) ` // This shouldn't check for deleted, because it's frequently used @@ -10324,7 +10320,6 @@ func (q 
*sqlQuerier) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]User &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ); err != nil { return nil, err } @@ -10353,7 +10348,7 @@ INSERT INTO login_type ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type InsertUserParams struct { @@ -10400,7 +10395,6 @@ func (q *sqlQuerier) InsertUser(ctx context.Context, arg InsertUserParams) (User &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10459,7 +10453,7 @@ SET updated_at = $3 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserAppearanceSettingsParams struct { @@ -10490,7 +10484,6 @@ func (q *sqlQuerier) UpdateUserAppearanceSettings(ctx context.Context, arg Updat &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) 
return i, err } @@ -10577,7 +10570,7 @@ SET last_seen_at = $2, updated_at = $3 WHERE - id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserLastSeenAtParams struct { @@ -10608,7 +10601,6 @@ func (q *sqlQuerier) UpdateUserLastSeenAt(ctx context.Context, arg UpdateUserLas &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10626,7 +10618,7 @@ SET '':: bytea END WHERE - id = $2 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $2 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserLoginTypeParams struct { @@ -10656,7 +10648,6 @@ func (q *sqlQuerier) UpdateUserLoginType(ctx context.Context, arg UpdateUserLogi &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10672,7 +10663,7 @@ SET name = $6 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, 
theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserProfileParams struct { @@ -10713,7 +10704,6 @@ func (q *sqlQuerier) UpdateUserProfile(ctx context.Context, arg UpdateUserProfil &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10725,7 +10715,7 @@ SET quiet_hours_schedule = $2 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserQuietHoursScheduleParams struct { @@ -10755,7 +10745,6 @@ func (q *sqlQuerier) UpdateUserQuietHoursSchedule(ctx context.Context, arg Updat &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10768,7 +10757,7 @@ SET rbac_roles = ARRAY(SELECT DISTINCT UNNEST($1 :: text[])) WHERE id = $2 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, 
rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserRolesParams struct { @@ -10798,7 +10787,6 @@ func (q *sqlQuerier) UpdateUserRoles(ctx context.Context, arg UpdateUserRolesPar &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10810,7 +10798,7 @@ SET status = $2, updated_at = $3 WHERE - id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserStatusParams struct { @@ -10841,7 +10829,6 @@ func (q *sqlQuerier) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusP &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } diff --git a/coderd/database/sqlc.yaml b/coderd/database/sqlc.yaml index a70e45a522989..257c95ddb2d7a 100644 --- a/coderd/database/sqlc.yaml +++ b/coderd/database/sqlc.yaml @@ -135,6 +135,8 @@ sql: api_key_id: APIKeyID callback_url: CallbackURL login_type_oauth2_provider_app: LoginTypeOAuth2ProviderApp + crypto_key_feature_workspace_apps_api_key: CryptoKeyFeatureWorkspaceAppsAPIKey + crypto_key_feature_oidc_convert: CryptoKeyFeatureOIDCConvert rules: - name: do-not-use-public-schema-in-queries message: "do not use public schema in queries" diff --git a/coderd/database/tx.go b/coderd/database/tx.go index 43da15f3f058c..32a25753513ed 100644 --- a/coderd/database/tx.go 
+++ b/coderd/database/tx.go @@ -33,7 +33,7 @@ func ReadModifyUpdate(db Store, f func(tx Store) error, ) error { var err error for retries := 0; retries < maxRetries; retries++ { - err = db.InTx(f, &sql.TxOptions{ + err = db.InTx(f, &TxOptions{ Isolation: sql.LevelRepeatableRead, }) var pqe *pq.Error diff --git a/coderd/database/tx_test.go b/coderd/database/tx_test.go index d97c1bc26d57f..5f051085188ca 100644 --- a/coderd/database/tx_test.go +++ b/coderd/database/tx_test.go @@ -19,7 +19,7 @@ func TestReadModifyUpdate_OK(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). Return(nil) err := database.ReadModifyUpdate(mDB, func(tx database.Store) error { @@ -34,11 +34,11 @@ func TestReadModifyUpdate_RetryOK(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) firstUpdate := mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). Return(&pq.Error{Code: pq.ErrorCode("40001")}) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). After(firstUpdate). Times(1). Return(nil) @@ -55,7 +55,7 @@ func TestReadModifyUpdate_HardError(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). Return(xerrors.New("a bad thing happened")) @@ -71,7 +71,7 @@ func TestReadModifyUpdate_TooManyRetries(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). 
+ InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(5). Return(&pq.Error{Code: pq.ErrorCode("40001")}) err := database.ReadModifyUpdate(mDB, func(tx database.Store) error { diff --git a/coderd/idpsync/role_test.go b/coderd/idpsync/role_test.go index c6ab989881976..45e9edd6c1dd4 100644 --- a/coderd/idpsync/role_test.go +++ b/coderd/idpsync/role_test.go @@ -2,7 +2,6 @@ package idpsync_test import ( "context" - "database/sql" "encoding/json" "testing" @@ -324,7 +323,7 @@ func TestNoopNoDiff(t *testing.T) { // and 'UpdateMemberRoles'. mDB.EXPECT().InTx( gomock.Any(), gomock.Any(), - ).DoAndReturn(func(f func(database.Store) error, _ *sql.TxOptions) error { + ).DoAndReturn(func(f func(database.Store) error, _ *database.TxOptions) error { err := f(mDB) return err }) diff --git a/coderd/insights_test.go b/coderd/insights_test.go index 06fe8d46ca5ac..bf8aa4bc44506 100644 --- a/coderd/insights_test.go +++ b/coderd/insights_test.go @@ -700,14 +700,13 @@ func TestTemplateInsights_Golden(t *testing.T) { connectionCount = 0 } for createdAt.Before(stat.endedAt) { - err = batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ + batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ ConnectionCount: connectionCount, SessionCountVscode: stat.sessionCountVSCode, SessionCountJetbrains: stat.sessionCountJetBrains, SessionCountReconnectingPty: stat.sessionCountReconnectingPTY, SessionCountSsh: stat.sessionCountSSH, }, false) - require.NoError(t, err, "want no error inserting agent stats") createdAt = createdAt.Add(30 * time.Second) } } @@ -1599,14 +1598,13 @@ func TestUserActivityInsights_Golden(t *testing.T) { connectionCount = 0 } for createdAt.Before(stat.endedAt) { - err = batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, 
&agentproto.Stats{ + batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ ConnectionCount: connectionCount, SessionCountVscode: stat.sessionCountVSCode, SessionCountJetbrains: stat.sessionCountJetBrains, SessionCountReconnectingPty: stat.sessionCountReconnectingPTY, SessionCountSsh: stat.sessionCountSSH, }, false) - require.NoError(t, err, "want no error inserting agent stats") createdAt = createdAt.Add(30 * time.Second) } } diff --git a/coderd/jwtutils/jwe.go b/coderd/jwtutils/jwe.go index d03816a477a26..bc9d0ddd2a9c8 100644 --- a/coderd/jwtutils/jwe.go +++ b/coderd/jwtutils/jwe.go @@ -65,6 +65,12 @@ func Encrypt(ctx context.Context, e EncryptKeyProvider, claims Claims) (string, return compact, nil } +func WithDecryptExpected(expected jwt.Expected) func(*DecryptOptions) { + return func(opts *DecryptOptions) { + opts.RegisteredClaims = expected + } +} + // DecryptOptions are options for decrypting a JWE. type DecryptOptions struct { RegisteredClaims jwt.Expected @@ -100,7 +106,7 @@ func Decrypt(ctx context.Context, d DecryptKeyProvider, token string, claims Cla kid := object.Header.KeyID if kid == "" { - return xerrors.Errorf("expected %q header to be a string", keyIDHeaderKey) + return ErrMissingKeyID } key, err := d.DecryptingKey(ctx, kid) diff --git a/coderd/jwtutils/jws.go b/coderd/jwtutils/jws.go index 73f35e672492d..0c8ca9aa30f39 100644 --- a/coderd/jwtutils/jws.go +++ b/coderd/jwtutils/jws.go @@ -10,10 +10,27 @@ import ( "golang.org/x/xerrors" ) +var ErrMissingKeyID = xerrors.New("missing key ID") + const ( keyIDHeaderKey = "kid" ) +// RegisteredClaims is a convenience type for embedding jwt.Claims. It should be +// preferred over embedding jwt.Claims directly since it will ensure that certain fields are set. 
+type RegisteredClaims jwt.Claims + +func (r RegisteredClaims) Validate(e jwt.Expected) error { + if r.Expiry == nil { + return xerrors.Errorf("expiry is required") + } + if e.Time.IsZero() { + return xerrors.Errorf("expected time is required") + } + + return (jwt.Claims(r)).Validate(e) +} + // Claims defines the payload for a JWT. Most callers // should embed jwt.Claims type Claims interface { @@ -24,6 +41,11 @@ const ( signingAlgo = jose.HS512 ) +type SigningKeyManager interface { + SigningKeyProvider + VerifyKeyProvider +} + type SigningKeyProvider interface { SigningKey(ctx context.Context) (id string, key interface{}, err error) } @@ -75,6 +97,12 @@ type VerifyOptions struct { SignatureAlgorithm jose.SignatureAlgorithm } +func WithVerifyExpected(expected jwt.Expected) func(*VerifyOptions) { + return func(opts *VerifyOptions) { + opts.RegisteredClaims = expected + } +} + // Verify verifies that a token was signed by the provided key. It unmarshals into the provided claims. func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claims, opts ...func(*VerifyOptions)) error { options := VerifyOptions{ @@ -105,7 +133,7 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim kid := signature.Header.KeyID if kid == "" { - return xerrors.Errorf("expected %q header to be a string", keyIDHeaderKey) + return ErrMissingKeyID } key, err := v.VerifyingKey(ctx, kid) @@ -125,3 +153,35 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim return claims.Validate(options.RegisteredClaims) } + +// StaticKey fulfills the SigningKeycache and EncryptionKeycache interfaces. Useful for testing. 
+type StaticKey struct { + ID string + Key interface{} +} + +func (s StaticKey) SigningKey(_ context.Context) (string, interface{}, error) { + return s.ID, s.Key, nil +} + +func (s StaticKey) VerifyingKey(_ context.Context, id string) (interface{}, error) { + if id != s.ID { + return nil, xerrors.Errorf("invalid id %q", id) + } + return s.Key, nil +} + +func (s StaticKey) EncryptingKey(_ context.Context) (string, interface{}, error) { + return s.ID, s.Key, nil +} + +func (s StaticKey) DecryptingKey(_ context.Context, id string) (interface{}, error) { + if id != s.ID { + return nil, xerrors.Errorf("invalid id %q", id) + } + return s.Key, nil +} + +func (StaticKey) Close() error { + return nil +} diff --git a/coderd/jwtutils/jwt_test.go b/coderd/jwtutils/jwt_test.go index 697e5d210d858..5d1f4d48bdb4a 100644 --- a/coderd/jwtutils/jwt_test.go +++ b/coderd/jwtutils/jwt_test.go @@ -236,11 +236,11 @@ func TestJWS(t *testing.T) { ctx = testutil.Context(t, testutil.WaitShort) db, _ = dbtestutil.NewDB(t) _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, + Feature: database.CryptoKeyFeatureOIDCConvert, StartsAt: time.Now(), }) log = slogtest.Make(t, nil) - fetcher = &cryptokeys.DBFetcher{DB: db, Feature: database.CryptoKeyFeatureOidcConvert} + fetcher = &cryptokeys.DBFetcher{DB: db} ) cache, err := cryptokeys.NewSigningCache(ctx, log, fetcher, codersdk.CryptoKeyFeatureOIDCConvert) @@ -326,15 +326,15 @@ func TestJWE(t *testing.T) { ctx = testutil.Context(t, testutil.WaitShort) db, _ = dbtestutil.NewDB(t) _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: time.Now(), }) log = slogtest.Make(t, nil) - fetcher = &cryptokeys.DBFetcher{DB: db, Feature: database.CryptoKeyFeatureWorkspaceApps} + fetcher = &cryptokeys.DBFetcher{DB: db} ) - cache, err := cryptokeys.NewEncryptionCache(ctx, log, fetcher, 
codersdk.CryptoKeyFeatureWorkspaceApp) + cache, err := cryptokeys.NewEncryptionCache(ctx, log, fetcher, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) claims := testClaims{ diff --git a/coderd/notifications/dispatch/smtp.go b/coderd/notifications/dispatch/smtp.go index e18aeaef88b81..dfb628b62eb86 100644 --- a/coderd/notifications/dispatch/smtp.go +++ b/coderd/notifications/dispatch/smtp.go @@ -453,7 +453,7 @@ func (s *SMTPHandler) auth(ctx context.Context, mechs string) (sasl.Client, erro continue } if password == "" { - errs = multierror.Append(errs, xerrors.New("cannot use PLAIN auth, password not defined (see CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD)")) + errs = multierror.Append(errs, xerrors.New("cannot use PLAIN auth, password not defined (see CODER_EMAIL_AUTH_PASSWORD)")) continue } @@ -475,7 +475,7 @@ func (s *SMTPHandler) auth(ctx context.Context, mechs string) (sasl.Client, erro continue } if password == "" { - errs = multierror.Append(errs, xerrors.New("cannot use LOGIN auth, password not defined (see CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD)")) + errs = multierror.Append(errs, xerrors.New("cannot use LOGIN auth, password not defined (see CODER_EMAIL_AUTH_PASSWORD)")) continue } diff --git a/coderd/notifications/fetcher.go b/coderd/notifications/fetcher.go index a579275d127bf..0688b88907981 100644 --- a/coderd/notifications/fetcher.go +++ b/coderd/notifications/fetcher.go @@ -38,6 +38,10 @@ func (n *notifier) fetchAppName(ctx context.Context) (string, error) { } return "", xerrors.Errorf("get application name: %w", err) } + + if appName == "" { + appName = notificationsDefaultAppName + } return appName, nil } @@ -49,5 +53,9 @@ func (n *notifier) fetchLogoURL(ctx context.Context) (string, error) { } return "", xerrors.Errorf("get logo URL: %w", err) } + + if logoURL == "" { + logoURL = notificationsDefaultLogoURL + } return logoURL, nil } diff --git a/coderd/notifications/fetcher_internal_test.go 
b/coderd/notifications/fetcher_internal_test.go new file mode 100644 index 0000000000000..a8d0149c883b8 --- /dev/null +++ b/coderd/notifications/fetcher_internal_test.go @@ -0,0 +1,231 @@ +package notifications + +import ( + "context" + "database/sql" + "testing" + "text/template" + + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/database/dbmock" +) + +func TestNotifier_FetchHelpers(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("https://example.com/logo.png", nil) + + ctx := context.Background() + helpers, err := n.fetchHelpers(ctx) + require.NoError(t, err) + + appName, ok := helpers["app_name"].(func() string) + require.True(t, ok) + require.Equal(t, "ACME Inc.", appName()) + + logoURL, ok := helpers["logo_url"].(func() string) + require.True(t, ok) + require.Equal(t, "https://example.com/logo.png", logoURL()) + }) + + t.Run("failed to fetch app name", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchHelpers(ctx) + require.Error(t, err) + require.ErrorContains(t, err, "get application name") + }) + + t.Run("failed to fetch logo URL", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + 
dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchHelpers(ctx) + require.ErrorContains(t, err, "get logo URL") + }) +} + +func TestNotifier_FetchAppName(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, "ACME Inc.", appName) + }) + + t.Run("No rows", func(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", sql.ErrNoRows) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultAppName, appName) + }) + + t.Run("Empty string", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", nil) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultAppName, appName) + }) + + t.Run("internal error", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchAppName(ctx) + require.Error(t, err) + }) +} + +func TestNotifier_FetchLogoURL(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := 
dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("https://example.com/logo.png", nil) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, "https://example.com/logo.png", logoURL) + }) + + t.Run("No rows", func(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", sql.ErrNoRows) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultLogoURL, logoURL) + }) + + t.Run("Empty string", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", nil) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultLogoURL, logoURL) + }) + + t.Run("internal error", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := &notifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchLogoURL(ctx) + require.Error(t, err) + }) +} diff --git a/coderd/promoauth/oauth2_test.go b/coderd/promoauth/oauth2_test.go index e54608385ccfe..9e31d90944f36 100644 --- a/coderd/promoauth/oauth2_test.go +++ b/coderd/promoauth/oauth2_test.go @@ -3,24 +3,19 @@ package promoauth_test import ( "context" "fmt" - "io" "net/http" - "net/http/httptest" "net/url" "strings" "testing" "time" "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" - ptestutil "github.com/prometheus/client_golang/prometheus/testutil" - 
io_prometheus_client "github.com/prometheus/client_model/go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" "golang.org/x/oauth2" "github.com/coder/coder/v2/coderd/coderdtest/oidctest" + "github.com/coder/coder/v2/coderd/coderdtest/promhelp" "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/testutil" @@ -34,7 +29,7 @@ func TestInstrument(t *testing.T) { reg := prometheus.NewRegistry() t.Cleanup(func() { if t.Failed() { - t.Log(registryDump(reg)) + t.Log(promhelp.RegistryDump(reg)) } }) @@ -46,7 +41,7 @@ func TestInstrument(t *testing.T) { const metricname = "coderd_oauth2_external_requests_total" count := func(source string) int { labels["source"] = source - return counterValue(t, reg, "coderd_oauth2_external_requests_total", labels) + return promhelp.CounterValue(t, reg, "coderd_oauth2_external_requests_total", labels) } factory := promoauth.NewFactory(reg) @@ -58,7 +53,7 @@ func TestInstrument(t *testing.T) { } // 0 Requests before we start - require.Nil(t, metricValue(t, reg, metricname, labels), "no metrics at start") + require.Nil(t, promhelp.MetricValue(t, reg, metricname, labels), "no metrics at start") noClientCtx := ctx // This should never be done, but promoauth should not break the default client @@ -94,7 +89,7 @@ func TestInstrument(t *testing.T) { // Verify the default client was not broken. This check is added because we // extend the http.DefaultTransport. If a `.Clone()` is not done, this can be // mis-used. It is cheap to run this quick check. 
- snapshot := registryDump(reg) + snapshot := promhelp.RegistryDump(reg) req, err := http.NewRequestWithContext(ctx, http.MethodGet, must[*url.URL](t)(idp.IssuerURL().Parse("/.well-known/openid-configuration")).String(), nil) require.NoError(t, err) @@ -103,7 +98,7 @@ func TestInstrument(t *testing.T) { require.NoError(t, err) _ = resp.Body.Close() - require.NoError(t, compare(reg, snapshot), "http default client corrupted") + require.NoError(t, promhelp.Compare(reg, snapshot), "http default client corrupted") } func TestGithubRateLimits(t *testing.T) { @@ -214,37 +209,26 @@ func TestGithubRateLimits(t *testing.T) { } pass := true if !c.ExpectNoMetrics { - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), c.Limit, "limit") - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_remaining", labels), c.Remaining, "remaining") - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_used", labels), c.Used, "used") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), c.Limit, "limit") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_remaining", labels), c.Remaining, "remaining") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_used", labels), c.Used, "used") if !c.at.IsZero() { until := c.Reset.Sub(c.at) // Float accuracy is not great, so we allow a delta of 2 - pass = pass && assert.InDelta(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_reset_in_seconds", labels), int(until.Seconds()), 2, "reset in") + pass = pass && assert.InDelta(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_reset_in_seconds", labels), int(until.Seconds()), 2, "reset in") } } else { - pass = pass && assert.Nil(t, metricValue(t, reg, 
"coderd_oauth2_external_requests_rate_limit_total", labels), "not exists") + pass = pass && assert.Nil(t, promhelp.MetricValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), "not exists") } // Helpful debugging if !pass { - t.Log(registryDump(reg)) + t.Log(promhelp.RegistryDump(reg)) } }) } } -func registryDump(reg *prometheus.Registry) string { - h := promhttp.HandlerFor(reg, promhttp.HandlerOpts{}) - rec := httptest.NewRecorder() - req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "/", nil) - h.ServeHTTP(rec, req) - resp := rec.Result() - data, _ := io.ReadAll(resp.Body) - _ = resp.Body.Close() - return string(data) -} - func must[V any](t *testing.T) func(v V, err error) V { return func(v V, err error) V { t.Helper() @@ -252,39 +236,3 @@ func must[V any](t *testing.T) func(v V, err error) V { return v } } - -func gaugeValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { - labeled := metricValue(t, reg, metricName, labels) - require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) - return int(labeled.GetGauge().GetValue()) -} - -func counterValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { - labeled := metricValue(t, reg, metricName, labels) - require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) - return int(labeled.GetCounter().GetValue()) -} - -func compare(reg prometheus.Gatherer, compare string) error { - return ptestutil.GatherAndCompare(reg, strings.NewReader(compare)) -} - -func metricValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Metric { - metrics, err := reg.Gather() - require.NoError(t, err) - - for _, m := range metrics { - if m.GetName() == metricName { - for _, labeled := range m.GetMetric() { - mLables := make(prometheus.Labels) - for _, v := range labeled.GetLabel() { - 
mLables[v.GetName()] = v.GetValue() - } - if maps.Equal(mLables, labels) { - return labeled - } - } - } - } - return nil -} diff --git a/coderd/templates.go b/coderd/templates.go index cbc6eb784d2e4..de47b5225a973 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -467,7 +467,7 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque templateVersionAudit.New = newTemplateVersion return nil - }, nil) + }, database.DefaultTXOptions().WithID("postTemplate")) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error inserting template.", diff --git a/coderd/userauth.go b/coderd/userauth.go index 85ab0d77e6cc1..13f9b088d731f 100644 --- a/coderd/userauth.go +++ b/coderd/userauth.go @@ -15,7 +15,8 @@ import ( "time" "github.com/coreos/go-oidc/v3/oidc" - "github.com/golang-jwt/jwt/v4" + "github.com/go-jose/go-jose/v4" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/go-github/v43/github" "github.com/google/uuid" "github.com/moby/moby/pkg/namesgenerator" @@ -23,6 +24,9 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/idpsync" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/audit" @@ -32,7 +36,6 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/idpsync" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/coderd/rbac" @@ -49,7 +52,7 @@ const ( ) type OAuthConvertStateClaims struct { - jwt.RegisteredClaims + jwtutils.RegisteredClaims UserID uuid.UUID `json:"user_id"` State string `json:"state"` @@ -57,6 +60,10 @@ type OAuthConvertStateClaims struct { ToLoginType codersdk.LoginType `json:"to_login_type"` } +func (o 
*OAuthConvertStateClaims) Validate(e jwt.Expected) error { + return o.RegisteredClaims.Validate(e) +} + // postConvertLoginType replies with an oauth state token capable of converting // the user to an oauth user. // @@ -149,11 +156,11 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { // Eg: Developers with more than 1 deployment. now := time.Now() claims := &OAuthConvertStateClaims{ - RegisteredClaims: jwt.RegisteredClaims{ + RegisteredClaims: jwtutils.RegisteredClaims{ Issuer: api.DeploymentID, Subject: stateString, Audience: []string{user.ID.String()}, - ExpiresAt: jwt.NewNumericDate(now.Add(time.Minute * 5)), + Expiry: jwt.NewNumericDate(now.Add(time.Minute * 5)), NotBefore: jwt.NewNumericDate(now.Add(time.Second * -1)), IssuedAt: jwt.NewNumericDate(now), ID: uuid.NewString(), @@ -164,9 +171,7 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { ToLoginType: req.ToType, } - token := jwt.NewWithClaims(jwt.SigningMethodHS512, claims) - // Key must be a byte slice, not an array. 
So make sure to include the [:] - tokenString, err := token.SignedString(api.OAuthSigningKey[:]) + token, err := jwtutils.Sign(ctx, api.OIDCConvertKeyCache, claims) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error signing state jwt.", @@ -176,8 +181,8 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { } aReq.New = database.AuditOAuthConvertState{ - CreatedAt: claims.IssuedAt.Time, - ExpiresAt: claims.ExpiresAt.Time, + CreatedAt: claims.IssuedAt.Time(), + ExpiresAt: claims.Expiry.Time(), FromLoginType: database.LoginType(claims.FromLoginType), ToLoginType: database.LoginType(claims.ToLoginType), UserID: claims.UserID, @@ -186,8 +191,8 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { http.SetCookie(rw, &http.Cookie{ Name: OAuthConvertCookieValue, Path: "/", - Value: tokenString, - Expires: claims.ExpiresAt.Time, + Value: token, + Expires: claims.Expiry.Time(), Secure: api.SecureAuthCookie, HttpOnly: true, // Must be SameSite to work on the redirected auth flow from the @@ -196,7 +201,7 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { }) httpapi.Write(ctx, rw, http.StatusCreated, codersdk.OAuthConversionResponse{ StateString: stateString, - ExpiresAt: claims.ExpiresAt.Time, + ExpiresAt: claims.Expiry.Time(), ToType: claims.ToLoginType, UserID: claims.UserID, }) @@ -286,6 +291,8 @@ func (api *API) postRequestOneTimePasscode(rw http.ResponseWriter, r *http.Reque if err != nil { logger.Error(ctx, "unable to notify user about one-time passcode request", slog.Error(err)) } + } else { + logger.Warn(ctx, "password reset requested for account that does not exist", slog.F("email", req.Email)) } } @@ -376,6 +383,7 @@ func (api *API) postChangePasswordWithOneTimePasscode(rw http.ResponseWriter, r now := dbtime.Now() if !equal || now.After(user.OneTimePasscodeExpiresAt.Time) { + logger.Warn(ctx, "password reset 
attempted with invalid or expired one-time passcode", slog.F("email", req.Email)) httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Incorrect email or one-time passcode.", }) @@ -1677,10 +1685,9 @@ func (api *API) convertUserToOauth(ctx context.Context, r *http.Request, db data } } var claims OAuthConvertStateClaims - token, err := jwt.ParseWithClaims(jwtCookie.Value, &claims, func(_ *jwt.Token) (interface{}, error) { - return api.OAuthSigningKey[:], nil - }) - if xerrors.Is(err, jwt.ErrSignatureInvalid) || !token.Valid { + + err = jwtutils.Verify(ctx, api.OIDCConvertKeyCache, jwtCookie.Value, &claims) + if xerrors.Is(err, cryptokeys.ErrKeyNotFound) || xerrors.Is(err, cryptokeys.ErrKeyInvalid) || xerrors.Is(err, jose.ErrCryptoFailure) || xerrors.Is(err, jwtutils.ErrMissingKeyID) { // These errors are probably because the user is mixing 2 coder deployments. return database.User{}, idpsync.HTTPError{ Code: http.StatusBadRequest, @@ -1709,7 +1716,7 @@ func (api *API) convertUserToOauth(ctx context.Context, r *http.Request, db data oauthConvertAudit.UserID = claims.UserID oauthConvertAudit.Old = user - if claims.RegisteredClaims.Issuer != api.DeploymentID { + if claims.Issuer != api.DeploymentID { return database.User{}, idpsync.HTTPError{ Code: http.StatusForbidden, Msg: "Request to convert login type failed. Issuer mismatch. 
Found a cookie from another coder deployment, please try again.", diff --git a/coderd/userauth_test.go b/coderd/userauth_test.go index 20dfe7f723899..6386be7eb8be4 100644 --- a/coderd/userauth_test.go +++ b/coderd/userauth_test.go @@ -3,6 +3,8 @@ package coderd_test import ( "context" "crypto" + "crypto/rand" + "encoding/json" "fmt" "io" "net/http" @@ -13,6 +15,7 @@ import ( "time" "github.com/coreos/go-oidc/v3/oidc" + "github.com/go-jose/go-jose/v4" "github.com/golang-jwt/jwt/v4" "github.com/google/go-github/v43/github" "github.com/google/uuid" @@ -27,10 +30,12 @@ import ( "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/coderdtest/oidctest" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/codersdk" @@ -1316,6 +1321,7 @@ func TestUserOIDC(t *testing.T) { owner := coderdtest.CreateFirstUser(t, client) user, userData := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + require.Equal(t, codersdk.LoginTypePassword, userData.LoginType) claims := jwt.MapClaims{ "email": userData.Email, @@ -1323,15 +1329,17 @@ func TestUserOIDC(t *testing.T) { var err error user.HTTPClient.Jar, err = cookiejar.New(nil) require.NoError(t, err) + user.HTTPClient.Transport = http.DefaultTransport.(*http.Transport).Clone() ctx := testutil.Context(t, testutil.WaitShort) + convertResponse, err := user.ConvertLoginType(ctx, codersdk.ConvertLoginRequest{ ToType: codersdk.LoginTypeOIDC, Password: "SomeSecurePassword!", }) require.NoError(t, err) - fake.LoginWithClient(t, user, claims, func(r *http.Request) { + _, _ = fake.LoginWithClient(t, user, 
claims, func(r *http.Request) { r.URL.RawQuery = url.Values{ "oidc_merge_state": {convertResponse.StateString}, }.Encode() @@ -1341,6 +1349,99 @@ func TestUserOIDC(t *testing.T) { r.AddCookie(cookie) } }) + + info, err := client.User(ctx, userData.ID.String()) + require.NoError(t, err) + require.Equal(t, codersdk.LoginTypeOIDC, info.LoginType) + }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitMedium) + logger = slogtest.Make(t, nil) + ) + + auditor := audit.NewMock() + fake := oidctest.NewFakeIDP(t, + oidctest.WithRefresh(func(_ string) error { + return xerrors.New("refreshing token should never occur") + }), + oidctest.WithServing(), + ) + cfg := fake.OIDCConfig(t, nil, func(cfg *coderd.OIDCConfig) { + cfg.AllowSignups = true + }) + + db, ps := dbtestutil.NewDB(t) + fetcher := &cryptokeys.DBFetcher{ + DB: db, + } + + kc, err := cryptokeys.NewSigningCache(ctx, logger, fetcher, codersdk.CryptoKeyFeatureOIDCConvert) + require.NoError(t, err) + + client := coderdtest.New(t, &coderdtest.Options{ + Auditor: auditor, + OIDCConfig: cfg, + Database: db, + Pubsub: ps, + OIDCConvertKeyCache: kc, + }) + + owner := coderdtest.CreateFirstUser(t, client) + user, userData := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + claims := jwt.MapClaims{ + "email": userData.Email, + } + user.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + user.HTTPClient.Transport = http.DefaultTransport.(*http.Transport).Clone() + + convertResponse, err := user.ConvertLoginType(ctx, codersdk.ConvertLoginRequest{ + ToType: codersdk.LoginTypeOIDC, + Password: "SomeSecurePassword!", + }) + require.NoError(t, err) + + // Update the cookie to use a bad signing key. We're asserting the behavior of the scenario + // where a JWT gets minted on an old version of Coder but gets verified on a new version. 
+ _, resp := fake.AttemptLogin(t, user, claims, func(r *http.Request) { + r.URL.RawQuery = url.Values{ + "oidc_merge_state": {convertResponse.StateString}, + }.Encode() + r.Header.Set(codersdk.SessionTokenHeader, user.SessionToken()) + + cookies := user.HTTPClient.Jar.Cookies(user.URL) + for i, cookie := range cookies { + if cookie.Name != coderd.OAuthConvertCookieValue { + continue + } + + jwt := cookie.Value + var claims coderd.OAuthConvertStateClaims + err := jwtutils.Verify(ctx, kc, jwt, &claims) + require.NoError(t, err) + badJWT := generateBadJWT(t, claims) + cookie.Value = badJWT + cookies[i] = cookie + } + + user.HTTPClient.Jar.SetCookies(user.URL, cookies) + + for _, cookie := range cookies { + fmt.Printf("cookie: %+v\n", cookie) + r.AddCookie(cookie) + } + }) + defer resp.Body.Close() + require.Equal(t, http.StatusBadRequest, resp.StatusCode) + var respErr codersdk.Response + err = json.NewDecoder(resp.Body).Decode(&respErr) + require.NoError(t, err) + require.Contains(t, respErr.Message, "Using an invalid jwt to authorize this action.") }) t.Run("AlternateUsername", func(t *testing.T) { @@ -2022,3 +2123,24 @@ func inflateClaims(t testing.TB, seed jwt.MapClaims, size int) jwt.MapClaims { seed["random_data"] = junk return seed } + +// generateBadJWT generates a JWT with a random key. It's intended to emulate the old-style JWT's we generated. 
+func generateBadJWT(t *testing.T, claims interface{}) string { + t.Helper() + + var buf [64]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + signer, err := jose.NewSigner(jose.SigningKey{ + Algorithm: jose.HS512, + Key: buf[:], + }, nil) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := signer.Sign(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 6ea631f2e7d0c..a181697f27279 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -32,6 +32,7 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -852,8 +853,12 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R ) if resumeToken != "" { var err error - peerID, err = api.Options.CoordinatorResumeTokenProvider.VerifyResumeToken(resumeToken) - if err != nil { + peerID, err = api.Options.CoordinatorResumeTokenProvider.VerifyResumeToken(ctx, resumeToken) + // If the token is missing the key ID, it's probably an old token in which + // case we just want to generate a new peer ID. 
+ if xerrors.Is(err, jwtutils.ErrMissingKeyID) { + peerID = uuid.New() + } else if err != nil { httpapi.Write(ctx, rw, http.StatusUnauthorized, codersdk.Response{ Message: workspacesdk.CoordinateAPIInvalidResumeToken, Detail: err.Error(), @@ -862,9 +867,10 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R }, }) return + } else { + api.Logger.Debug(ctx, "accepted coordinate resume token for peer", + slog.F("peer_id", peerID.String())) } - api.Logger.Debug(ctx, "accepted coordinate resume token for peer", - slog.F("peer_id", peerID.String())) } api.WebsocketWaitMutex.Lock() diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 8c0801a914d61..ba677975471d6 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -13,6 +13,7 @@ import ( "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -36,6 +37,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/externalauth" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/workspacesdk" @@ -531,20 +533,20 @@ func newResumeTokenRecordingProvider(t testing.TB, underlying tailnet.ResumeToke } } -func (r *resumeTokenRecordingProvider) GenerateResumeToken(peerID uuid.UUID) (*tailnetproto.RefreshResumeTokenResponse, error) { +func (r *resumeTokenRecordingProvider) GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*tailnetproto.RefreshResumeTokenResponse, error) { select { case r.generateCalls <- peerID: - return r.ResumeTokenProvider.GenerateResumeToken(peerID) + return r.ResumeTokenProvider.GenerateResumeToken(ctx, peerID) default: r.t.Error("generateCalls full") return nil, xerrors.New("generateCalls full") } } -func (r 
*resumeTokenRecordingProvider) VerifyResumeToken(token string) (uuid.UUID, error) { +func (r *resumeTokenRecordingProvider) VerifyResumeToken(ctx context.Context, token string) (uuid.UUID, error) { select { case r.verifyCalls <- token: - return r.ResumeTokenProvider.VerifyResumeToken(token) + return r.ResumeTokenProvider.VerifyResumeToken(ctx, token) default: r.t.Error("verifyCalls full") return uuid.Nil, xerrors.New("verifyCalls full") @@ -554,69 +556,136 @@ func (r *resumeTokenRecordingProvider) VerifyResumeToken(token string) (uuid.UUI func TestWorkspaceAgentClientCoordinate_ResumeToken(t *testing.T) { t.Parallel() - logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) - clock := quartz.NewMock(t) - resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() - require.NoError(t, err) - resumeTokenProvider := newResumeTokenRecordingProvider( - t, - tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour), - ) - client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ - Coordinator: tailnet.NewCoordinator(logger), - CoordinatorResumeTokenProvider: resumeTokenProvider, + t.Run("OK", func(t *testing.T) { + t.Parallel() + + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + clock := quartz.NewMock(t) + resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + require.NoError(t, err) + resumeTokenProvider := newResumeTokenRecordingProvider( + t, + tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour), + ) + client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + Coordinator: tailnet.NewCoordinator(logger), + CoordinatorResumeTokenProvider: resumeTokenProvider, + }) + defer closer.Close() + user := coderdtest.CreateFirstUser(t, client) + + // Create a workspace with an agent. No need to connect it since clients can + // still connect to the coordinator while the agent isn't connected. 
+ r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent().Do() + agentTokenUUID, err := uuid.Parse(r.AgentToken) + require.NoError(t, err) + ctx := testutil.Context(t, testutil.WaitLong) + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + require.NoError(t, err) + + // Connect with no resume token, and ensure that the peer ID is set to a + // random value. + originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") + require.NoError(t, err) + originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, uuid.Nil) + + // Connect with a valid resume token, and ensure that the peer ID is set to + // the stored value. + clock.Advance(time.Second) + newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, originalResumeToken) + require.NoError(t, err) + verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, originalResumeToken, verifiedToken) + newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.Equal(t, originalPeerID, newPeerID) + require.NotEqual(t, originalResumeToken, newResumeToken) + + // Connect with an invalid resume token, and ensure that the request is + // rejected. 
+ clock.Advance(time.Second) + _, err = connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "invalid") + require.Error(t, err) + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusUnauthorized, sdkErr.StatusCode()) + require.Len(t, sdkErr.Validations, 1) + require.Equal(t, "resume_token", sdkErr.Validations[0].Field) + verifiedToken = testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, "invalid", verifiedToken) + + select { + case <-resumeTokenProvider.generateCalls: + t.Fatal("unexpected peer ID in channel") + default: + } }) - defer closer.Close() - user := coderdtest.CreateFirstUser(t, client) - // Create a workspace with an agent. No need to connect it since clients can - // still connect to the coordinator while the agent isn't connected. - r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ - OrganizationID: user.OrganizationID, - OwnerID: user.UserID, - }).WithAgent().Do() - agentTokenUUID, err := uuid.Parse(r.AgentToken) - require.NoError(t, err) - ctx := testutil.Context(t, testutil.WaitLong) - agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint - require.NoError(t, err) + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() - // Connect with no resume token, and ensure that the peer ID is set to a - // random value. 
- originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") - require.NoError(t, err) - originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) - require.NotEqual(t, originalPeerID, uuid.Nil) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + clock := quartz.NewMock(t) + resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + require.NoError(t, err) + resumeTokenProvider := newResumeTokenRecordingProvider( + t, + tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour), + ) + client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + Coordinator: tailnet.NewCoordinator(logger), + CoordinatorResumeTokenProvider: resumeTokenProvider, + }) + defer closer.Close() + user := coderdtest.CreateFirstUser(t, client) - // Connect with a valid resume token, and ensure that the peer ID is set to - // the stored value. - clock.Advance(time.Second) - newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, originalResumeToken) - require.NoError(t, err) - verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) - require.Equal(t, originalResumeToken, verifiedToken) - newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) - require.Equal(t, originalPeerID, newPeerID) - require.NotEqual(t, originalResumeToken, newResumeToken) - - // Connect with an invalid resume token, and ensure that the request is - // rejected. 
- clock.Advance(time.Second) - _, err = connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "invalid") - require.Error(t, err) - var sdkErr *codersdk.Error - require.ErrorAs(t, err, &sdkErr) - require.Equal(t, http.StatusUnauthorized, sdkErr.StatusCode()) - require.Len(t, sdkErr.Validations, 1) - require.Equal(t, "resume_token", sdkErr.Validations[0].Field) - verifiedToken = testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) - require.Equal(t, "invalid", verifiedToken) + // Create a workspace with an agent. No need to connect it since clients can + // still connect to the coordinator while the agent isn't connected. + r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent().Do() + agentTokenUUID, err := uuid.Parse(r.AgentToken) + require.NoError(t, err) + ctx := testutil.Context(t, testutil.WaitLong) + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + require.NoError(t, err) - select { - case <-resumeTokenProvider.generateCalls: - t.Fatal("unexpected peer ID in channel") - default: - } + // Connect with no resume token, and ensure that the peer ID is set to a + // random value. + originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") + require.NoError(t, err) + originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, uuid.Nil) + + // Connect with an outdated token, and ensure that the peer ID is set to a + // random value. We don't want to fail requests just because + // a user got unlucky during a deployment upgrade. 
+ outdatedToken := generateBadJWT(t, jwtutils.RegisteredClaims{ + Subject: originalPeerID.String(), + Expiry: jwt.NewNumericDate(clock.Now().Add(time.Minute)), + }) + + clock.Advance(time.Second) + newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, outdatedToken) + require.NoError(t, err) + verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, outdatedToken, verifiedToken) + newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, newPeerID) + require.NotEqual(t, originalResumeToken, newResumeToken) + }) } // connectToCoordinatorAndFetchResumeToken connects to the tailnet coordinator diff --git a/coderd/workspaceagentsrpc_test.go b/coderd/workspaceagentsrpc_test.go index 817aa11c4c292..3f1f1a2b8a764 100644 --- a/coderd/workspaceagentsrpc_test.go +++ b/coderd/workspaceagentsrpc_test.go @@ -3,6 +3,7 @@ package coderd_test import ( "context" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -11,6 +12,7 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" @@ -20,7 +22,12 @@ import ( func TestWorkspaceAgentReportStats(t *testing.T) { t.Parallel() - client, db := coderdtest.NewWithDatabase(t, nil) + tickCh := make(chan time.Time) + flushCh := make(chan int, 1) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + WorkspaceUsageTrackerFlush: flushCh, + WorkspaceUsageTrackerTick: tickCh, + }) user := coderdtest.CreateFirstUser(t, client) r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ OrganizationID: user.OrganizationID, @@ -53,6 +60,10 @@ func 
TestWorkspaceAgentReportStats(t *testing.T) { }) require.NoError(t, err) + tickCh <- dbtime.Now() + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) require.NoError(t, err) diff --git a/coderd/workspaceapps.go b/coderd/workspaceapps.go index d2fa11b9ea2ea..e264dbd80b58d 100644 --- a/coderd/workspaceapps.go +++ b/coderd/workspaceapps.go @@ -16,6 +16,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" @@ -122,10 +123,11 @@ func (api *API) workspaceApplicationAuth(rw http.ResponseWriter, r *http.Request return } - // Encrypt the API key. - encryptedAPIKey, err := api.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ + payload := workspaceapps.EncryptedAPIKeyPayload{ APIKey: cookie.Value, - }) + } + payload.Fill(api.Clock.Now()) + encryptedAPIKey, err := jwtutils.Encrypt(ctx, api.AppEncryptionKeyCache, payload) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Failed to encrypt API key.", diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go index 14adf2d61d362..c6e251806230d 100644 --- a/coderd/workspaceapps/apptest/apptest.go +++ b/coderd/workspaceapps/apptest/apptest.go @@ -3,6 +3,7 @@ package apptest import ( "bufio" "context" + "crypto/rand" "encoding/json" "fmt" "io" @@ -408,6 +409,67 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { require.Equal(t, http.StatusInternalServerError, resp.StatusCode) assertWorkspaceLastUsedAtNotUpdated(t, appDetails) }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + appDetails := 
setupProxyTest(t, nil) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + u := appDetails.PathAppURL(appDetails.Apps.Owner) + resp, err := requestWithRetries(ctx, t, appDetails.AppClient(t), http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + + appTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotNil(t, appTokenCookie, "no signed app token cookie in response") + require.Equal(t, appTokenCookie.Path, u.Path, "incorrect path on app token cookie") + + object, err := jose.ParseSigned(appTokenCookie.Value) + require.NoError(t, err) + require.Len(t, object.Signatures, 1) + + // Parse the payload. + var tok workspaceapps.SignedToken + //nolint:gosec + err = json.Unmarshal(object.UnsafePayloadWithoutVerification(), &tok) + require.NoError(t, err) + + appTokenClient := appDetails.AppClient(t) + apiKey := appTokenClient.SessionToken() + appTokenClient.SetSessionToken("") + appTokenClient.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + // Sign the token with an old-style key. + appTokenCookie.Value = generateBadJWT(t, tok) + appTokenClient.HTTPClient.Jar.SetCookies(u, + []*http.Cookie{ + appTokenCookie, + { + Name: codersdk.PathAppSessionTokenCookie, + Value: apiKey, + }, + }, + ) + + resp, err = requestWithRetries(ctx, t, appTokenClient, http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + assertWorkspaceLastUsedAtUpdated(t, appDetails) + + // Since the old token is invalid, the signed app token cookie should have a new value. 
+ newTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotEqual(t, appTokenCookie.Value, newTokenCookie.Value) + }) }) t.Run("WorkspaceApplicationAuth", func(t *testing.T) { @@ -463,7 +525,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { appClient.SetSessionToken("") // Try to load the application without authentication. - u := c.appURL + u := *c.appURL u.Path = path.Join(u.Path, "/test") req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) @@ -500,7 +562,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { // Copy the query parameters and then check equality. u.RawQuery = gotLocation.RawQuery - require.Equal(t, u, gotLocation) + require.Equal(t, u, *gotLocation) // Verify the API key is set. encryptedAPIKey := gotLocation.Query().Get(workspaceapps.SubdomainProxyAPIKeyParam) @@ -580,6 +642,38 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) }) + + t.Run("BadJWE", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + currentKeyStr := appDetails.SDKClient.SessionToken() + appClient := appDetails.AppClient(t) + appClient.SetSessionToken("") + u := *c.appURL + u.Path = path.Join(u.Path, "/test") + badToken := generateBadJWE(t, workspaceapps.EncryptedAPIKeyPayload{ + APIKey: currentKeyStr, + }) + + u.RawQuery = (url.Values{ + workspaceapps.SubdomainProxyAPIKeyParam: {badToken}, + }).Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + require.NoError(t, err) + + var resp *http.Response + resp, err = doWithRetries(t, appClient, req) + require.NoError(t, err) + defer resp.Body.Close() + require.Equal(t, http.StatusBadRequest, resp.StatusCode) + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + 
require.Contains(t, string(body), "Could not decrypt API key. Please remove the query parameter and try again.") + }) } }) }) @@ -1077,6 +1171,68 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { assertWorkspaceLastUsedAtNotUpdated(t, appDetails) }) }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + appDetails := setupProxyTest(t, nil) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + u := appDetails.SubdomainAppURL(appDetails.Apps.Owner) + resp, err := requestWithRetries(ctx, t, appDetails.AppClient(t), http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + + appTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotNil(t, appTokenCookie, "no signed token cookie in response") + require.Equal(t, appTokenCookie.Path, "/", "incorrect path on signed token cookie") + + object, err := jose.ParseSigned(appTokenCookie.Value) + require.NoError(t, err) + require.Len(t, object.Signatures, 1) + + // Parse the payload. + var tok workspaceapps.SignedToken + //nolint:gosec + err = json.Unmarshal(object.UnsafePayloadWithoutVerification(), &tok) + require.NoError(t, err) + + appTokenClient := appDetails.AppClient(t) + apiKey := appTokenClient.SessionToken() + appTokenClient.SetSessionToken("") + appTokenClient.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + // Sign the token with an old-style key. + appTokenCookie.Value = generateBadJWT(t, tok) + appTokenClient.HTTPClient.Jar.SetCookies(u, + []*http.Cookie{ + appTokenCookie, + { + Name: codersdk.SubdomainAppSessionTokenCookie, + Value: apiKey, + }, + }, + ) + + // We should still be able to successfully proxy. 
+ resp, err = requestWithRetries(ctx, t, appTokenClient, http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + assertWorkspaceLastUsedAtUpdated(t, appDetails) + + // Since the old token is invalid, the signed app token cookie should have a new value. + newTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotEqual(t, appTokenCookie.Value, newTokenCookie.Value) + }) }) t.Run("PortSharing", func(t *testing.T) { @@ -1789,3 +1945,57 @@ func assertWorkspaceLastUsedAtNotUpdated(t testing.TB, details *Details) { require.NoError(t, err) require.Equal(t, before.LastUsedAt, after.LastUsedAt, "workspace LastUsedAt updated when it should not have been") } + +func generateBadJWE(t *testing.T, claims interface{}) string { + t.Helper() + var buf [32]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + encrypt, err := jose.NewEncrypter( + jose.A256GCM, + jose.Recipient{ + Algorithm: jose.A256GCMKW, + Key: buf[:], + }, &jose.EncrypterOptions{ + Compression: jose.DEFLATE, + }, + ) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := encrypt.Encrypt(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} + +// generateBadJWT generates a JWT with a random key. It's intended to emulate the old-style JWT's we generated. 
+func generateBadJWT(t *testing.T, claims interface{}) string { + t.Helper() + + var buf [64]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + signer, err := jose.NewSigner(jose.SigningKey{ + Algorithm: jose.HS512, + Key: buf[:], + }, nil) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := signer.Sign(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} + +func findCookie(cookies []*http.Cookie, name string) *http.Cookie { + for _, cookie := range cookies { + if cookie.Name == name { + return cookie + } + } + return nil +} diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 1b369cf6d6ef4..1aa4dfe91bdd0 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -13,11 +13,15 @@ import ( "golang.org/x/exp/slices" "golang.org/x/xerrors" + "github.com/go-jose/go-jose/v4/jwt" + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" @@ -35,12 +39,20 @@ type DBTokenProvider struct { DeploymentValues *codersdk.DeploymentValues OAuth2Configs *httpmw.OAuth2Configs WorkspaceAgentInactiveTimeout time.Duration - SigningKey SecurityKey + Keycache cryptokeys.SigningKeycache } var _ SignedTokenProvider = &DBTokenProvider{} -func NewDBTokenProvider(log slog.Logger, accessURL *url.URL, authz rbac.Authorizer, db database.Store, cfg *codersdk.DeploymentValues, oauth2Cfgs *httpmw.OAuth2Configs, workspaceAgentInactiveTimeout time.Duration, signingKey SecurityKey) SignedTokenProvider { +func NewDBTokenProvider(log slog.Logger, + accessURL *url.URL, + authz 
rbac.Authorizer, + db database.Store, + cfg *codersdk.DeploymentValues, + oauth2Cfgs *httpmw.OAuth2Configs, + workspaceAgentInactiveTimeout time.Duration, + signer cryptokeys.SigningKeycache, +) SignedTokenProvider { if workspaceAgentInactiveTimeout == 0 { workspaceAgentInactiveTimeout = 1 * time.Minute } @@ -53,12 +65,12 @@ func NewDBTokenProvider(log slog.Logger, accessURL *url.URL, authz rbac.Authoriz DeploymentValues: cfg, OAuth2Configs: oauth2Cfgs, WorkspaceAgentInactiveTimeout: workspaceAgentInactiveTimeout, - SigningKey: signingKey, + Keycache: signer, } } func (p *DBTokenProvider) FromRequest(r *http.Request) (*SignedToken, bool) { - return FromRequest(r, p.SigningKey) + return FromRequest(r, p.Keycache) } func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *http.Request, issueReq IssueTokenRequest) (*SignedToken, string, bool) { @@ -70,7 +82,7 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * dangerousSystemCtx := dbauthz.AsSystemRestricted(ctx) appReq := issueReq.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "invalid app request") return nil, "", false @@ -210,9 +222,11 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * return nil, "", false } + token.RegisteredClaims = jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(DefaultTokenExpiry)), + } // Sign the token. 
- token.Expiry = time.Now().Add(DefaultTokenExpiry) - tokenStr, err := p.SigningKey.SignToken(token) + tokenStr, err := jwtutils.Sign(ctx, p.Keycache, token) if err != nil { WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "generate token") return nil, "", false diff --git a/coderd/workspaceapps/db_test.go b/coderd/workspaceapps/db_test.go index 6c5a0212aff2b..bf364f1ce62b3 100644 --- a/coderd/workspaceapps/db_test.go +++ b/coderd/workspaceapps/db_test.go @@ -13,6 +13,7 @@ import ( "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -20,6 +21,7 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" @@ -94,8 +96,7 @@ func Test_ResolveRequest(t *testing.T) { _ = closer.Close() }) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitMedium) - t.Cleanup(cancel) + ctx := testutil.Context(t, testutil.WaitMedium) firstUser := coderdtest.CreateFirstUser(t, client) me, err := client.User(ctx, codersdk.Me) @@ -276,15 +277,17 @@ func Test_ResolveRequest(t *testing.T) { _ = w.Body.Close() require.Equal(t, &workspaceapps.SignedToken{ + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(token.Expiry.Time()), + }, Request: req, - Expiry: token.Expiry, // ignored to avoid flakiness UserID: me.ID, WorkspaceID: workspace.ID, AgentID: agentID, AppURL: appURL, }, token) require.NotZero(t, token.Expiry) - require.WithinDuration(t, time.Now().Add(workspaceapps.DefaultTokenExpiry), token.Expiry, time.Minute) + require.WithinDuration(t, time.Now().Add(workspaceapps.DefaultTokenExpiry), token.Expiry.Time(), time.Minute) // Check that the token was set in 
the response and is valid. require.Len(t, w.Cookies(), 1) @@ -292,10 +295,11 @@ func Test_ResolveRequest(t *testing.T) { require.Equal(t, codersdk.SignedAppTokenCookie, cookie.Name) require.Equal(t, req.BasePath, cookie.Path) - parsedToken, err := api.AppSecurityKey.VerifySignedToken(cookie.Value) + var parsedToken workspaceapps.SignedToken + err := jwtutils.Verify(ctx, api.AppSigningKeyCache, cookie.Value, &parsedToken) require.NoError(t, err) // normalize expiry - require.WithinDuration(t, token.Expiry, parsedToken.Expiry, 2*time.Second) + require.WithinDuration(t, token.Expiry.Time(), parsedToken.Expiry.Time(), 2*time.Second) parsedToken.Expiry = token.Expiry require.Equal(t, token, &parsedToken) @@ -314,7 +318,7 @@ func Test_ResolveRequest(t *testing.T) { }) require.True(t, ok) // normalize expiry - require.WithinDuration(t, token.Expiry, secondToken.Expiry, 2*time.Second) + require.WithinDuration(t, token.Expiry.Time(), secondToken.Expiry.Time(), 2*time.Second) secondToken.Expiry = token.Expiry require.Equal(t, token, secondToken) } @@ -540,13 +544,16 @@ func Test_ResolveRequest(t *testing.T) { // App name differs AppSlugOrPort: appNamePublic, }).Normalize(), - Expiry: time.Now().Add(time.Minute), + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(time.Minute)), + }, UserID: me.ID, WorkspaceID: workspace.ID, AgentID: agentID, AppURL: appURL, } - badTokenStr, err := api.AppSecurityKey.SignToken(badToken) + + badTokenStr, err := jwtutils.Sign(ctx, api.AppSigningKeyCache, badToken) require.NoError(t, err) req := (workspaceapps.Request{ @@ -589,7 +596,8 @@ func Test_ResolveRequest(t *testing.T) { require.Len(t, cookies, 1) require.Equal(t, cookies[0].Name, codersdk.SignedAppTokenCookie) require.NotEqual(t, cookies[0].Value, badTokenStr) - parsedToken, err := api.AppSecurityKey.VerifySignedToken(cookies[0].Value) + var parsedToken workspaceapps.SignedToken + err = jwtutils.Verify(ctx, api.AppSigningKeyCache, cookies[0].Value, 
&parsedToken) require.NoError(t, err) require.Equal(t, appNameOwner, parsedToken.AppSlugOrPort) }) diff --git a/coderd/workspaceapps/provider.go b/coderd/workspaceapps/provider.go index 8d4b7fd149800..1887036e35cbf 100644 --- a/coderd/workspaceapps/provider.go +++ b/coderd/workspaceapps/provider.go @@ -38,7 +38,7 @@ type ResolveRequestOptions struct { func ResolveRequest(rw http.ResponseWriter, r *http.Request, opts ResolveRequestOptions) (*SignedToken, bool) { appReq := opts.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { // This is a 500 since it's a coder server or proxy that's making this // request struct based on details from the request. The values should @@ -79,7 +79,7 @@ func ResolveRequest(rw http.ResponseWriter, r *http.Request, opts ResolveRequest Name: codersdk.SignedAppTokenCookie, Value: tokenStr, Path: appReq.BasePath, - Expires: token.Expiry, + Expires: token.Expiry.Time(), }) return token, true diff --git a/coderd/workspaceapps/proxy.go b/coderd/workspaceapps/proxy.go index c6cd01395db5c..a9c60357a009d 100644 --- a/coderd/workspaceapps/proxy.go +++ b/coderd/workspaceapps/proxy.go @@ -11,17 +11,21 @@ import ( "strconv" "strings" "sync" + "time" "github.com/go-chi/chi/v5" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "go.opentelemetry.io/otel/trace" "nhooyr.io/websocket" "cdr.dev/slog" "github.com/coder/coder/v2/agent/agentssh" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" @@ -97,8 +101,8 @@ type Server struct { HostnameRegex *regexp.Regexp RealIPConfig *httpmw.RealIPConfig - SignedTokenProvider SignedTokenProvider - AppSecurityKey SecurityKey + SignedTokenProvider 
SignedTokenProvider + APIKeyEncryptionKeycache cryptokeys.EncryptionKeycache // DisablePathApps disables path-based apps. This is a security feature as path // based apps share the same cookie as the dashboard, and are susceptible to XSS @@ -176,7 +180,10 @@ func (s *Server) handleAPIKeySmuggling(rw http.ResponseWriter, r *http.Request, } // Exchange the encoded API key for a real one. - token, err := s.AppSecurityKey.DecryptAPIKey(encryptedAPIKey) + var payload EncryptedAPIKeyPayload + err := jwtutils.Decrypt(ctx, s.APIKeyEncryptionKeycache, encryptedAPIKey, &payload, jwtutils.WithDecryptExpected(jwt.Expected{ + Time: time.Now(), + })) if err != nil { s.Logger.Debug(ctx, "could not decrypt smuggled workspace app API key", slog.Error(err)) site.RenderStaticErrorPage(rw, r, site.ErrorPageData{ @@ -225,7 +232,7 @@ func (s *Server) handleAPIKeySmuggling(rw http.ResponseWriter, r *http.Request, // server using the wrong value. http.SetCookie(rw, &http.Cookie{ Name: AppConnectSessionTokenCookieName(accessMethod), - Value: token, + Value: payload.APIKey, Domain: domain, Path: "/", MaxAge: 0, diff --git a/coderd/workspaceapps/request.go b/coderd/workspaceapps/request.go index 4f6a6f3a64e65..0833ab731fe67 100644 --- a/coderd/workspaceapps/request.go +++ b/coderd/workspaceapps/request.go @@ -124,9 +124,9 @@ func (r Request) Normalize() Request { return req } -// Validate ensures the request is correct and contains the necessary +// Check ensures the request is correct and contains the necessary // parameters. 
-func (r Request) Validate() error { +func (r Request) Check() error { switch r.AccessMethod { case AccessMethodPath, AccessMethodSubdomain, AccessMethodTerminal: default: diff --git a/coderd/workspaceapps/request_test.go b/coderd/workspaceapps/request_test.go index b6e4bb7a2e65f..fbabc840745e9 100644 --- a/coderd/workspaceapps/request_test.go +++ b/coderd/workspaceapps/request_test.go @@ -279,7 +279,7 @@ func Test_RequestValidate(t *testing.T) { if !c.noNormalize { req = c.req.Normalize() } - err := req.Validate() + err := req.Check() if c.errContains == "" { require.NoError(t, err) } else { diff --git a/coderd/workspaceapps/token.go b/coderd/workspaceapps/token.go index 33428b0e25f13..dcd8c5a0e5c34 100644 --- a/coderd/workspaceapps/token.go +++ b/coderd/workspaceapps/token.go @@ -1,35 +1,27 @@ package workspaceapps import ( - "encoding/base64" - "encoding/hex" - "encoding/json" "net/http" "strings" "time" - "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "golang.org/x/xerrors" - "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" ) -const ( - tokenSigningAlgorithm = jose.HS512 - apiKeyEncryptionAlgorithm = jose.A256GCMKW -) - // SignedToken is the struct data contained inside a workspace app JWE. It // contains the details of the workspace app that the token is valid for to // avoid database queries. type SignedToken struct { + jwtutils.RegisteredClaims // Request details. Request `json:"request"` - // Trusted resolved details. 
- Expiry time.Time `json:"expiry"` // set by GenerateToken if unset UserID uuid.UUID `json:"user_id"` WorkspaceID uuid.UUID `json:"workspace_id"` AgentID uuid.UUID `json:"agent_id"` @@ -57,191 +49,32 @@ func (t SignedToken) MatchesRequest(req Request) bool { t.AppSlugOrPort == req.AppSlugOrPort } -// SecurityKey is used for signing and encrypting app tokens and API keys. -// -// The first 64 bytes of the key are used for signing tokens with HMAC-SHA256, -// and the last 32 bytes are used for encrypting API keys with AES-256-GCM. -// We use a single key for both operations to avoid having to store and manage -// two keys. -type SecurityKey [96]byte - -func (k SecurityKey) IsZero() bool { - return k == SecurityKey{} -} - -func (k SecurityKey) String() string { - return hex.EncodeToString(k[:]) -} - -func (k SecurityKey) signingKey() []byte { - return k[:64] -} - -func (k SecurityKey) encryptionKey() []byte { - return k[64:] -} - -func KeyFromString(str string) (SecurityKey, error) { - var key SecurityKey - decoded, err := hex.DecodeString(str) - if err != nil { - return key, xerrors.Errorf("decode key: %w", err) - } - if len(decoded) != len(key) { - return key, xerrors.Errorf("expected key to be %d bytes, got %d", len(key), len(decoded)) - } - copy(key[:], decoded) - - return key, nil -} - -// SignToken generates a signed workspace app token with the given payload. If -// the payload doesn't have an expiry, it will be set to the current time plus -// the default expiry. 
-func (k SecurityKey) SignToken(payload SignedToken) (string, error) { - if payload.Expiry.IsZero() { - payload.Expiry = time.Now().Add(DefaultTokenExpiry) - } - payloadBytes, err := json.Marshal(payload) - if err != nil { - return "", xerrors.Errorf("marshal payload to JSON: %w", err) - } - - signer, err := jose.NewSigner(jose.SigningKey{ - Algorithm: tokenSigningAlgorithm, - Key: k.signingKey(), - }, nil) - if err != nil { - return "", xerrors.Errorf("create signer: %w", err) - } - - signedObject, err := signer.Sign(payloadBytes) - if err != nil { - return "", xerrors.Errorf("sign payload: %w", err) - } - - serialized, err := signedObject.CompactSerialize() - if err != nil { - return "", xerrors.Errorf("serialize JWS: %w", err) - } - - return serialized, nil -} - -// VerifySignedToken parses a signed workspace app token with the given key and -// returns the payload. If the token is invalid or expired, an error is -// returned. -func (k SecurityKey) VerifySignedToken(str string) (SignedToken, error) { - object, err := jose.ParseSigned(str) - if err != nil { - return SignedToken{}, xerrors.Errorf("parse JWS: %w", err) - } - if len(object.Signatures) != 1 { - return SignedToken{}, xerrors.New("expected 1 signature") - } - if object.Signatures[0].Header.Algorithm != string(tokenSigningAlgorithm) { - return SignedToken{}, xerrors.Errorf("expected token signing algorithm to be %q, got %q", tokenSigningAlgorithm, object.Signatures[0].Header.Algorithm) - } - - output, err := object.Verify(k.signingKey()) - if err != nil { - return SignedToken{}, xerrors.Errorf("verify JWS: %w", err) - } - - var tok SignedToken - err = json.Unmarshal(output, &tok) - if err != nil { - return SignedToken{}, xerrors.Errorf("unmarshal payload: %w", err) - } - if tok.Expiry.Before(time.Now()) { - return SignedToken{}, xerrors.New("signed app token expired") - } - - return tok, nil -} - type EncryptedAPIKeyPayload struct { - APIKey string `json:"api_key"` - ExpiresAt time.Time 
`json:"expires_at"` + jwtutils.RegisteredClaims + APIKey string `json:"api_key"` } -// EncryptAPIKey encrypts an API key for subdomain token smuggling. -func (k SecurityKey) EncryptAPIKey(payload EncryptedAPIKeyPayload) (string, error) { - if payload.APIKey == "" { - return "", xerrors.New("API key is empty") - } - if payload.ExpiresAt.IsZero() { - // Very short expiry as these keys are only used once as part of an - // automatic redirection flow. - payload.ExpiresAt = dbtime.Now().Add(time.Minute) - } - - payloadBytes, err := json.Marshal(payload) - if err != nil { - return "", xerrors.Errorf("marshal payload: %w", err) - } - - // JWEs seem to apply a nonce themselves. - encrypter, err := jose.NewEncrypter( - jose.A256GCM, - jose.Recipient{ - Algorithm: apiKeyEncryptionAlgorithm, - Key: k.encryptionKey(), - }, - &jose.EncrypterOptions{ - Compression: jose.DEFLATE, - }, - ) - if err != nil { - return "", xerrors.Errorf("initializer jose encrypter: %w", err) - } - encryptedObject, err := encrypter.Encrypt(payloadBytes) - if err != nil { - return "", xerrors.Errorf("encrypt jwe: %w", err) - } - - encrypted := encryptedObject.FullSerialize() - return base64.RawURLEncoding.EncodeToString([]byte(encrypted)), nil +func (e *EncryptedAPIKeyPayload) Fill(now time.Time) { + e.Issuer = "coderd" + e.Audience = jwt.Audience{"wsproxy"} + e.Expiry = jwt.NewNumericDate(now.Add(time.Minute)) + e.NotBefore = jwt.NewNumericDate(now.Add(-time.Minute)) } -// DecryptAPIKey undoes EncryptAPIKey and is used in the subdomain app handler. 
-func (k SecurityKey) DecryptAPIKey(encryptedAPIKey string) (string, error) { - encrypted, err := base64.RawURLEncoding.DecodeString(encryptedAPIKey) - if err != nil { - return "", xerrors.Errorf("base64 decode encrypted API key: %w", err) +func (e EncryptedAPIKeyPayload) Validate(ex jwt.Expected) error { + if e.NotBefore == nil { + return xerrors.Errorf("not before is required") } - object, err := jose.ParseEncrypted(string(encrypted)) - if err != nil { - return "", xerrors.Errorf("parse encrypted API key: %w", err) - } - if object.Header.Algorithm != string(apiKeyEncryptionAlgorithm) { - return "", xerrors.Errorf("expected API key encryption algorithm to be %q, got %q", apiKeyEncryptionAlgorithm, object.Header.Algorithm) - } - - // Decrypt using the hashed secret. - decrypted, err := object.Decrypt(k.encryptionKey()) - if err != nil { - return "", xerrors.Errorf("decrypt API key: %w", err) - } - - // Unmarshal the payload. - var payload EncryptedAPIKeyPayload - if err := json.Unmarshal(decrypted, &payload); err != nil { - return "", xerrors.Errorf("unmarshal decrypted payload: %w", err) - } - - // Validate expiry. - if payload.ExpiresAt.Before(dbtime.Now()) { - return "", xerrors.New("encrypted API key expired") - } + ex.Issuer = "coderd" + ex.AnyAudience = jwt.Audience{"wsproxy"} - return payload.APIKey, nil + return e.RegisteredClaims.Validate(ex) } // FromRequest returns the signed token from the request, if it exists and is // valid. The caller must check that the token matches the request. -func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { +func FromRequest(r *http.Request, mgr cryptokeys.SigningKeycache) (*SignedToken, bool) { // Get all signed app tokens from the request. This includes the query // parameter and all matching cookies sent with the request. 
If there are // somehow multiple signed app token cookies, we want to try all of them @@ -270,8 +103,12 @@ func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { tokens = tokens[:4] } + ctx := r.Context() for _, tokenStr := range tokens { - token, err := key.VerifySignedToken(tokenStr) + var token SignedToken + err := jwtutils.Verify(ctx, mgr, tokenStr, &token, jwtutils.WithVerifyExpected(jwt.Expected{ + Time: time.Now(), + })) if err == nil { req := token.Request.Normalize() if hasQueryParam && req.AccessMethod != AccessMethodTerminal { @@ -280,7 +117,7 @@ func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { return nil, false } - err := req.Validate() + err := req.Check() if err == nil { // The request has a valid signed app token, which is a valid // token signed by us. The caller must check that it matches diff --git a/coderd/workspaceapps/token_test.go b/coderd/workspaceapps/token_test.go index c656ae2ab77b8..db070268fa196 100644 --- a/coderd/workspaceapps/token_test.go +++ b/coderd/workspaceapps/token_test.go @@ -1,22 +1,22 @@ package workspaceapps_test import ( - "fmt" + "crypto/rand" "net/http" "net/http/httptest" "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" - "github.com/go-jose/go-jose/v3" "github.com/google/uuid" "github.com/stretchr/testify/require" - "github.com/coder/coder/v2/coderd/coderdtest" - "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" - "github.com/coder/coder/v2/cryptorand" ) func Test_TokenMatchesRequest(t *testing.T) { @@ -283,129 +283,6 @@ func Test_TokenMatchesRequest(t *testing.T) { } } -func Test_GenerateToken(t *testing.T) { - t.Parallel() - - t.Run("SetExpiry", func(t *testing.T) { - t.Parallel() - - tokenStr, err := coderdtest.AppSecurityKey.SignToken(workspaceapps.SignedToken{ - Request: workspaceapps.Request{ 
- AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Time{}, - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }) - require.NoError(t, err) - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(tokenStr) - require.NoError(t, err) - - require.WithinDuration(t, time.Now().Add(time.Minute), token.Expiry, 15*time.Second) - }) - - future := time.Now().Add(time.Hour) - cases := []struct { - name string - token workspaceapps.SignedToken - parseErrContains string - }{ - { - name: "OK1", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: future, - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }, - }, - { - name: "OK2", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodSubdomain, - BasePath: "/", - UsernameOrID: "oof", - WorkspaceNameOrID: "rab", - AgentNameOrID: "zab", - AppSlugOrPort: "xuq", - }, - - Expiry: future, - UserID: uuid.MustParse("6fa684a3-11aa-49fd-8512-ab527bd9b900"), - WorkspaceID: uuid.MustParse("b2d816cc-505c-441d-afdf-dae01781bc0b"), - AgentID: uuid.MustParse("6c4396e1-af88-4a8a-91a3-13ea54fc29fb"), - AppURL: "http://localhost:9090", - }, - }, - { - name: "Expired", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: 
workspaceapps.AccessMethodSubdomain, - BasePath: "/", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Now().Add(-time.Hour), - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }, - parseErrContains: "token expired", - }, - } - - for _, c := range cases { - c := c - - t.Run(c.name, func(t *testing.T) { - t.Parallel() - - str, err := coderdtest.AppSecurityKey.SignToken(c.token) - require.NoError(t, err) - - // Tokens aren't deterministic as they have a random nonce, so we - // can't compare them directly. - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(str) - if c.parseErrContains != "" { - require.Error(t, err) - require.ErrorContains(t, err, c.parseErrContains) - } else { - require.NoError(t, err) - // normalize the expiry - require.WithinDuration(t, c.token.Expiry, token.Expiry, 10*time.Second) - c.token.Expiry = token.Expiry - require.Equal(t, c.token, token) - } - }) - } -} - func Test_FromRequest(t *testing.T) { t.Parallel() @@ -419,7 +296,13 @@ func Test_FromRequest(t *testing.T) { Value: "invalid", }) + ctx := testutil.Context(t, testutil.WaitShort) + signer := newSigner(t) + token := workspaceapps.SignedToken{ + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, Request: workspaceapps.Request{ AccessMethod: workspaceapps.AccessMethodSubdomain, BasePath: "/", @@ -429,7 +312,6 @@ func Test_FromRequest(t *testing.T) { AgentNameOrID: "agent", AppSlugOrPort: "app", }, - Expiry: time.Now().Add(time.Hour), UserID: uuid.New(), WorkspaceID: uuid.New(), AgentID: uuid.New(), @@ -438,16 +320,15 @@ func Test_FromRequest(t *testing.T) { // Add an expired cookie expired := token - expired.Expiry = time.Now().Add(time.Hour * -1) - expiredStr, err 
:= coderdtest.AppSecurityKey.SignToken(token) + expired.RegisteredClaims.Expiry = jwt.NewNumericDate(time.Now().Add(time.Hour * -1)) + expiredStr, err := jwtutils.Sign(ctx, signer, expired) require.NoError(t, err) r.AddCookie(&http.Cookie{ Name: codersdk.SignedAppTokenCookie, Value: expiredStr, }) - // Add a valid token - validStr, err := coderdtest.AppSecurityKey.SignToken(token) + validStr, err := jwtutils.Sign(ctx, signer, token) require.NoError(t, err) r.AddCookie(&http.Cookie{ @@ -455,147 +336,27 @@ func Test_FromRequest(t *testing.T) { Value: validStr, }) - signed, ok := workspaceapps.FromRequest(r, coderdtest.AppSecurityKey) + signed, ok := workspaceapps.FromRequest(r, signer) require.True(t, ok, "expected a token to be found") // Confirm it is the correct token. require.Equal(t, signed.UserID, token.UserID) }) } -// The ParseToken fn is tested quite thoroughly in the GenerateToken test as -// well. -func Test_ParseToken(t *testing.T) { - t.Parallel() - - t.Run("InvalidJWS", func(t *testing.T) { - t.Parallel() - - token, err := coderdtest.AppSecurityKey.VerifySignedToken("invalid") - require.Error(t, err) - require.ErrorContains(t, err, "parse JWS") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) - - t.Run("VerifySignature", func(t *testing.T) { - t.Parallel() +func newSigner(t *testing.T) jwtutils.StaticKey { + t.Helper() - // Create a valid token using a different key. 
- var otherKey workspaceapps.SecurityKey - copy(otherKey[:], coderdtest.AppSecurityKey[:]) - for i := range otherKey { - otherKey[i] ^= 0xff - } - require.NotEqual(t, coderdtest.AppSecurityKey, otherKey) - - tokenStr, err := otherKey.SignToken(workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Now().Add(time.Hour), - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }) - require.NoError(t, err) - - // Verify the token is invalid. - token, err := coderdtest.AppSecurityKey.VerifySignedToken(tokenStr) - require.Error(t, err) - require.ErrorContains(t, err, "verify JWS") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) - - t.Run("InvalidBody", func(t *testing.T) { - t.Parallel() - - // Create a signature for an invalid body. 
- signer, err := jose.NewSigner(jose.SigningKey{Algorithm: jose.HS512, Key: coderdtest.AppSecurityKey[:64]}, nil) - require.NoError(t, err) - signedObject, err := signer.Sign([]byte("hi")) - require.NoError(t, err) - serialized, err := signedObject.CompactSerialize() - require.NoError(t, err) - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(serialized) - require.Error(t, err) - require.ErrorContains(t, err, "unmarshal payload") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) -} - -func TestAPIKeyEncryption(t *testing.T) { - t.Parallel() - - genAPIKey := func(t *testing.T) string { - id, _ := cryptorand.String(10) - secret, _ := cryptorand.String(22) - - return fmt.Sprintf("%s-%s", id, secret) + return jwtutils.StaticKey{ + ID: "test", + Key: generateSecret(t, 64), } +} - t.Run("OK", func(t *testing.T) { - t.Parallel() - - key := genAPIKey(t) - encrypted, err := coderdtest.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - }) - require.NoError(t, err) - - decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.NoError(t, err) - require.Equal(t, key, decryptedKey) - }) - - t.Run("Verifies", func(t *testing.T) { - t.Parallel() - - t.Run("Expiry", func(t *testing.T) { - t.Parallel() - - key := genAPIKey(t) - encrypted, err := coderdtest.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - ExpiresAt: dbtime.Now().Add(-1 * time.Hour), - }) - require.NoError(t, err) - - decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.Error(t, err) - require.ErrorContains(t, err, "expired") - require.Empty(t, decryptedKey) - }) - - t.Run("EncryptionKey", func(t *testing.T) { - t.Parallel() - - // Create a valid token using a different key. 
- var otherKey workspaceapps.SecurityKey - copy(otherKey[:], coderdtest.AppSecurityKey[:]) - for i := range otherKey { - otherKey[i] ^= 0xff - } - require.NotEqual(t, coderdtest.AppSecurityKey, otherKey) - - // Encrypt with the other key. - key := genAPIKey(t) - encrypted, err := otherKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - }) - require.NoError(t, err) +func generateSecret(t *testing.T, size int) []byte { + t.Helper() - // Decrypt with the original key. - decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.Error(t, err) - require.ErrorContains(t, err, "decrypt API key") - require.Empty(t, decryptedKey) - }) - }) + secret := make([]byte, size) + _, err := rand.Read(secret) + require.NoError(t, err) + return secret } diff --git a/coderd/workspaceapps_test.go b/coderd/workspaceapps_test.go index 1d00b7daa7bd9..52b3e18b4e6ad 100644 --- a/coderd/workspaceapps_test.go +++ b/coderd/workspaceapps_test.go @@ -5,16 +5,23 @@ import ( "net/http" "net/url" "testing" + "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/stretchr/testify/require" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) func TestGetAppHost(t *testing.T) { @@ -181,16 +188,28 @@ func TestWorkspaceApplicationAuth(t *testing.T) { t.Run(c.name, func(t *testing.T) { t.Parallel() - db, pubsub := dbtestutil.NewDB(t) - + ctx := testutil.Context(t, testutil.WaitMedium) + logger := slogtest.Make(t, nil) accessURL, err := url.Parse(c.accessURL) require.NoError(t, err) + db, ps := dbtestutil.NewDB(t) + fetcher := 
&cryptokeys.DBFetcher{ + DB: db, + } + + kc, err := cryptokeys.NewEncryptionCache(ctx, logger, fetcher, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) + require.NoError(t, err) + + clock := quartz.NewMock(t) + client := coderdtest.New(t, &coderdtest.Options{ - Database: db, - Pubsub: pubsub, - AccessURL: accessURL, - AppHostname: c.appHostname, + AccessURL: accessURL, + AppHostname: c.appHostname, + Database: db, + Pubsub: ps, + APIKeyEncryptionCache: kc, + Clock: clock, }) _ = coderdtest.CreateFirstUser(t, client) @@ -240,7 +259,15 @@ func TestWorkspaceApplicationAuth(t *testing.T) { loc.RawQuery = q.Encode() require.Equal(t, c.expectRedirect, loc.String()) - // The decrypted key is verified in the apptest test suite. + var token workspaceapps.EncryptedAPIKeyPayload + err = jwtutils.Decrypt(ctx, kc, encryptedAPIKey, &token, jwtutils.WithDecryptExpected(jwt.Expected{ + Time: clock.Now(), + AnyAudience: jwt.Audience{"wsproxy"}, + Issuer: "coderd", + })) + require.NoError(t, err) + require.Equal(t, jwt.NewNumericDate(clock.Now().Add(time.Minute)), token.Expiry) + require.Equal(t, jwt.NewNumericDate(clock.Now().Add(-time.Minute)), token.NotBefore) }) } } diff --git a/coderd/workspacestats/batcher.go b/coderd/workspacestats/batcher.go index 1f14c5cec5a17..46efc69170562 100644 --- a/coderd/workspacestats/batcher.go +++ b/coderd/workspacestats/batcher.go @@ -25,7 +25,7 @@ const ( ) type Batcher interface { - Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) error + Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) } // DBBatcher holds a buffer of agent stats and periodically flushes them to @@ -139,7 +139,7 @@ func (b *DBBatcher) Add( workspaceID uuid.UUID, st *agentproto.Stats, usage bool, -) error { +) { b.mu.Lock() defer b.mu.Unlock() @@ -176,7 +176,6 @@ func (b *DBBatcher) Add( b.flushLever <- 
struct{}{} b.flushForced.Store(true) } - return nil } // Run runs the batcher. diff --git a/coderd/workspacestats/batcher_internal_test.go b/coderd/workspacestats/batcher_internal_test.go index 3e106f07e4e2f..874acd7667dce 100644 --- a/coderd/workspacestats/batcher_internal_test.go +++ b/coderd/workspacestats/batcher_internal_test.go @@ -63,7 +63,7 @@ func TestBatchStats(t *testing.T) { // Given: a single data point is added for workspace t2 := t1.Add(time.Second) t.Logf("inserting 1 stat") - require.NoError(t, b.Add(t2.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false)) + b.Add(t2.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false) // When: it becomes time to report stats // Signal a tick and wait for a flush to complete. @@ -87,9 +87,9 @@ func TestBatchStats(t *testing.T) { t.Logf("inserting %d stats", defaultBufferSize) for i := 0; i < defaultBufferSize; i++ { if i%2 == 0 { - require.NoError(t, b.Add(t3.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false)) + b.Add(t3.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false) } else { - require.NoError(t, b.Add(t3.Add(time.Millisecond), deps2.Agent.ID, deps2.User.ID, deps2.Template.ID, deps2.Workspace.ID, randStats(t), false)) + b.Add(t3.Add(time.Millisecond), deps2.Agent.ID, deps2.User.ID, deps2.Template.ID, deps2.Workspace.ID, randStats(t), false) } } }() diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index fecfd1b1eda92..e59a9f15d5e95 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -6,7 +6,6 @@ import ( "time" "github.com/google/uuid" - "golang.org/x/sync/errgroup" "golang.org/x/xerrors" "cdr.dev/slog" @@ -118,70 +117,64 @@ func (r *Reporter) ReportAppStats(ctx context.Context, stats 
[]workspaceapps.Sta return nil } +// nolint:revive // usage is a control flag while we have the experiment func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.Workspace, workspaceAgent database.WorkspaceAgent, templateName string, stats *agentproto.Stats, usage bool) error { - if stats.ConnectionCount > 0 { - var nextAutostart time.Time - if workspace.AutostartSchedule.String != "" { - templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) - // If the template schedule fails to load, just default to bumping - // without the next transition and log it. - if err != nil { - r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID), - slog.Error(err), - ) - } else { - next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) - if allowed { - nextAutostart = next - } - } - } - ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) - } + // update agent stats + r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) - var errGroup errgroup.Group - errGroup.Go(func() error { - err := r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) - if err != nil { - r.opts.Logger.Error(ctx, "add agent stats to batcher", slog.Error(err)) - return xerrors.Errorf("insert workspace agent stats batch: %w", err) - } - return nil - }) - errGroup.Go(func() error { - err := r.opts.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }) + // update prometheus metrics + if r.opts.UpdateAgentMetricsFn != nil { + user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) 
if err != nil { - return xerrors.Errorf("update workspace LastUsedAt: %w", err) + return xerrors.Errorf("get user: %w", err) } + + r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ + Username: user.Username, + WorkspaceName: workspace.Name, + AgentName: workspaceAgent.Name, + TemplateName: templateName, + }, stats.Metrics) + } + + // workspace activity: if no sessions we do not bump activity + if usage && stats.SessionCountVscode == 0 && stats.SessionCountJetbrains == 0 && stats.SessionCountReconnectingPty == 0 && stats.SessionCountSsh == 0 { return nil - }) - if r.opts.UpdateAgentMetricsFn != nil { - errGroup.Go(func() error { - user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("get user: %w", err) - } + } - r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: workspaceAgent.Name, - TemplateName: templateName, - }, stats.Metrics) - return nil - }) + // legacy stats: if no active connections we do not bump activity + if !usage && stats.ConnectionCount == 0 { + return nil } - err := errGroup.Wait() - if err != nil { - return xerrors.Errorf("update stats in database: %w", err) + + // check next autostart + var nextAutostart time.Time + if workspace.AutostartSchedule.String != "" { + templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) + // If the template schedule fails to load, just default to bumping + // without the next transition and log it. 
+ if err != nil { + r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", + slog.F("workspace_id", workspace.ID), + slog.F("template_id", workspace.TemplateID), + slog.Error(err), + ) + } else { + next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) + if allowed { + nextAutostart = next + } + } } - err = r.opts.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspace.ID), []byte{}) + // bump workspace activity + ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) + + // bump workspace last_used_at + r.opts.UsageTracker.Add(workspace.ID) + + // notify workspace update + err := r.opts.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspace.ID), []byte{}) if err != nil { r.opts.Logger.Warn(ctx, "failed to publish workspace agent stats", slog.F("workspace_id", workspace.ID), slog.Error(err)) diff --git a/coderd/workspacestats/tracker.go b/coderd/workspacestats/tracker.go index 33532247b36e0..f55edde3b57e6 100644 --- a/coderd/workspacestats/tracker.go +++ b/coderd/workspacestats/tracker.go @@ -130,7 +130,6 @@ func (tr *UsageTracker) flush(now time.Time) { authCtx := dbauthz.AsSystemRestricted(ctx) tr.flushLock.Lock() defer tr.flushLock.Unlock() - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
if err := tr.s.BatchUpdateWorkspaceLastUsedAt(authCtx, database.BatchUpdateWorkspaceLastUsedAtParams{ LastUsedAt: now, IDs: ids, diff --git a/coderd/workspacestats/workspacestatstest/batcher.go b/coderd/workspacestats/workspacestatstest/batcher.go index 2f5dd7d13aa0a..592e244518790 100644 --- a/coderd/workspacestats/workspacestatstest/batcher.go +++ b/coderd/workspacestats/workspacestatstest/batcher.go @@ -25,7 +25,7 @@ type StatsBatcher struct { var _ workspacestats.Batcher = &StatsBatcher{} -func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) error { +func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) { b.Mu.Lock() defer b.Mu.Unlock() b.Called++ @@ -36,5 +36,4 @@ func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUI b.LastWorkspaceID = workspaceID b.LastStats = st b.LastUsage = usage - return nil } diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index ad53cd7d45609..dd532467bbc92 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -735,9 +735,9 @@ func expectDB(t *testing.T, opts ...txExpect) *dbmock.MockStore { // we expect to be run in a transaction; we use mTx to record the // "in transaction" calls. mDB.EXPECT().InTx( - gomock.Any(), gomock.Eq(&sql.TxOptions{Isolation: sql.LevelRepeatableRead}), + gomock.Any(), gomock.Eq(&database.TxOptions{Isolation: sql.LevelRepeatableRead}), ). - DoAndReturn(func(f func(database.Store) error, _ *sql.TxOptions) error { + DoAndReturn(func(f func(database.Store) error, _ *database.TxOptions) error { err := f(mTx) return err }) @@ -763,7 +763,7 @@ func withTemplate(mTx *dbmock.MockStore) { // withInTx runs the given functions on the same db mock. 
func withInTx(mTx *dbmock.MockStore) { mTx.EXPECT().InTx(gomock.Any(), gomock.Any()).Times(1).DoAndReturn( - func(f func(store database.Store) error, _ *sql.TxOptions) error { + func(f func(store database.Store) error, _ *database.TxOptions) error { return f(mTx) }, ) diff --git a/codersdk/deployment.go b/codersdk/deployment.go index d6840df504b85..3ba09bd38d1a4 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -926,6 +926,23 @@ when required by your organization's security policy.`, Name: "Config", Description: `Use a YAML configuration file when your server launch become unwieldy.`, } + deploymentGroupEmail = serpent.Group{ + Name: "Email", + Description: "Configure how emails are sent.", + YAML: "email", + } + deploymentGroupEmailAuth = serpent.Group{ + Name: "Email Authentication", + Parent: &deploymentGroupEmail, + Description: "Configure SMTP authentication options.", + YAML: "emailAuth", + } + deploymentGroupEmailTLS = serpent.Group{ + Name: "Email TLS", + Parent: &deploymentGroupEmail, + Description: "Configure TLS for your SMTP server target.", + YAML: "emailTLS", + } deploymentGroupNotifications = serpent.Group{ Name: "Notifications", YAML: "notifications", @@ -997,6 +1014,135 @@ when required by your organization's security policy.`, Group: &deploymentGroupIntrospectionLogging, YAML: "filter", } + emailFrom := serpent.Option{ + Name: "Email: From Address", + Description: "The sender's address to use.", + Flag: "email-from", + Env: "CODER_EMAIL_FROM", + Value: &c.Notifications.SMTP.From, + Group: &deploymentGroupEmail, + YAML: "from", + } + emailSmarthost := serpent.Option{ + Name: "Email: Smarthost", + Description: "The intermediary SMTP host through which emails are sent.", + Flag: "email-smarthost", + Env: "CODER_EMAIL_SMARTHOST", + Default: "localhost:587", // To pass validation. 
+ Value: &c.Notifications.SMTP.Smarthost, + Group: &deploymentGroupEmail, + YAML: "smarthost", + } + emailHello := serpent.Option{ + Name: "Email: Hello", + Description: "The hostname identifying the SMTP server.", + Flag: "email-hello", + Env: "CODER_EMAIL_HELLO", + Default: "localhost", + Value: &c.Notifications.SMTP.Hello, + Group: &deploymentGroupEmail, + YAML: "hello", + } + emailForceTLS := serpent.Option{ + Name: "Email: Force TLS", + Description: "Force a TLS connection to the configured SMTP smarthost.", + Flag: "email-force-tls", + Env: "CODER_EMAIL_FORCE_TLS", + Default: "false", + Value: &c.Notifications.SMTP.ForceTLS, + Group: &deploymentGroupEmail, + YAML: "forceTLS", + } + emailAuthIdentity := serpent.Option{ + Name: "Email Auth: Identity", + Description: "Identity to use with PLAIN authentication.", + Flag: "email-auth-identity", + Env: "CODER_EMAIL_AUTH_IDENTITY", + Value: &c.Notifications.SMTP.Auth.Identity, + Group: &deploymentGroupEmailAuth, + YAML: "identity", + } + emailAuthUsername := serpent.Option{ + Name: "Email Auth: Username", + Description: "Username to use with PLAIN/LOGIN authentication.", + Flag: "email-auth-username", + Env: "CODER_EMAIL_AUTH_USERNAME", + Value: &c.Notifications.SMTP.Auth.Username, + Group: &deploymentGroupEmailAuth, + YAML: "username", + } + emailAuthPassword := serpent.Option{ + Name: "Email Auth: Password", + Description: "Password to use with PLAIN/LOGIN authentication.", + Flag: "email-auth-password", + Env: "CODER_EMAIL_AUTH_PASSWORD", + Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"), + Value: &c.Notifications.SMTP.Auth.Password, + Group: &deploymentGroupEmailAuth, + } + emailAuthPasswordFile := serpent.Option{ + Name: "Email Auth: Password File", + Description: "File from which to load password for use with PLAIN/LOGIN authentication.", + Flag: "email-auth-password-file", + Env: "CODER_EMAIL_AUTH_PASSWORD_FILE", + Value: &c.Notifications.SMTP.Auth.PasswordFile, + Group: 
&deploymentGroupEmailAuth, + YAML: "passwordFile", + } + emailTLSStartTLS := serpent.Option{ + Name: "Email TLS: StartTLS", + Description: "Enable STARTTLS to upgrade insecure SMTP connections using TLS.", + Flag: "email-tls-starttls", + Env: "CODER_EMAIL_TLS_STARTTLS", + Value: &c.Notifications.SMTP.TLS.StartTLS, + Group: &deploymentGroupEmailTLS, + YAML: "startTLS", + } + emailTLSServerName := serpent.Option{ + Name: "Email TLS: Server Name", + Description: "Server name to verify against the target certificate.", + Flag: "email-tls-server-name", + Env: "CODER_EMAIL_TLS_SERVERNAME", + Value: &c.Notifications.SMTP.TLS.ServerName, + Group: &deploymentGroupEmailTLS, + YAML: "serverName", + } + emailTLSSkipCertVerify := serpent.Option{ + Name: "Email TLS: Skip Certificate Verification (Insecure)", + Description: "Skip verification of the target server's certificate (insecure).", + Flag: "email-tls-skip-verify", + Env: "CODER_EMAIL_TLS_SKIPVERIFY", + Value: &c.Notifications.SMTP.TLS.InsecureSkipVerify, + Group: &deploymentGroupEmailTLS, + YAML: "insecureSkipVerify", + } + emailTLSCertAuthorityFile := serpent.Option{ + Name: "Email TLS: Certificate Authority File", + Description: "CA certificate file to use.", + Flag: "email-tls-ca-cert-file", + Env: "CODER_EMAIL_TLS_CACERTFILE", + Value: &c.Notifications.SMTP.TLS.CAFile, + Group: &deploymentGroupEmailTLS, + YAML: "caCertFile", + } + emailTLSCertFile := serpent.Option{ + Name: "Email TLS: Certificate File", + Description: "Certificate file to use.", + Flag: "email-tls-cert-file", + Env: "CODER_EMAIL_TLS_CERTFILE", + Value: &c.Notifications.SMTP.TLS.CertFile, + Group: &deploymentGroupEmailTLS, + YAML: "certFile", + } + emailTLSCertKeyFile := serpent.Option{ + Name: "Email TLS: Certificate Key File", + Description: "Certificate key file to use.", + Flag: "email-tls-cert-key-file", + Env: "CODER_EMAIL_TLS_CERTKEYFILE", + Value: &c.Notifications.SMTP.TLS.KeyFile, + Group: &deploymentGroupEmailTLS, + YAML: "certKeyFile", + } 
opts := serpent.OptionSet{ { Name: "Access URL", @@ -1357,14 +1503,18 @@ when required by your organization's security policy.`, Default: strings.Join(agentmetrics.LabelAll, ","), }, { - Name: "Prometheus Collect Database Metrics", - Description: "Collect database metrics (may increase charges for metrics storage).", - Flag: "prometheus-collect-db-metrics", - Env: "CODER_PROMETHEUS_COLLECT_DB_METRICS", - Value: &c.Prometheus.CollectDBMetrics, - Group: &deploymentGroupIntrospectionPrometheus, - YAML: "collect_db_metrics", - Default: "false", + Name: "Prometheus Collect Database Metrics", + // Some db metrics like transaction information will still be collected. + // Query metrics blow up the number of unique time series with labels + // and can be very expensive. So default to not capturing query metrics. + Description: "Collect database query metrics (may increase charges for metrics storage). " + + "If set to false, a reduced set of database metrics are still collected.", + Flag: "prometheus-collect-db-metrics", + Env: "CODER_PROMETHEUS_COLLECT_DB_METRICS", + Value: &c.Prometheus.CollectDBMetrics, + Group: &deploymentGroupIntrospectionPrometheus, + YAML: "collect_db_metrics", + Default: "false", }, // Pprof settings { @@ -2428,6 +2578,21 @@ Write out the current server config as YAML to stdout.`, YAML: "thresholdDatabase", Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"), }, + // Email options + emailFrom, + emailSmarthost, + emailHello, + emailForceTLS, + emailAuthIdentity, + emailAuthUsername, + emailAuthPassword, + emailAuthPasswordFile, + emailTLSStartTLS, + emailTLSServerName, + emailTLSSkipCertVerify, + emailTLSCertAuthorityFile, + emailTLSCertFile, + emailTLSCertKeyFile, // Notifications Options { Name: "Notifications: Method", @@ -2458,36 +2623,37 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.From, Group: &deploymentGroupNotificationsEmail, YAML: "from", + UseInstead: 
serpent.OptionSet{emailFrom}, }, { Name: "Notifications: Email: Smarthost", Description: "The intermediary SMTP host through which emails are sent.", Flag: "notifications-email-smarthost", Env: "CODER_NOTIFICATIONS_EMAIL_SMARTHOST", - Default: "localhost:587", // To pass validation. Value: &c.Notifications.SMTP.Smarthost, Group: &deploymentGroupNotificationsEmail, YAML: "smarthost", + UseInstead: serpent.OptionSet{emailSmarthost}, }, { Name: "Notifications: Email: Hello", Description: "The hostname identifying the SMTP server.", Flag: "notifications-email-hello", Env: "CODER_NOTIFICATIONS_EMAIL_HELLO", - Default: "localhost", Value: &c.Notifications.SMTP.Hello, Group: &deploymentGroupNotificationsEmail, YAML: "hello", + UseInstead: serpent.OptionSet{emailHello}, }, { Name: "Notifications: Email: Force TLS", Description: "Force a TLS connection to the configured SMTP smarthost.", Flag: "notifications-email-force-tls", Env: "CODER_NOTIFICATIONS_EMAIL_FORCE_TLS", - Default: "false", Value: &c.Notifications.SMTP.ForceTLS, Group: &deploymentGroupNotificationsEmail, YAML: "forceTLS", + UseInstead: serpent.OptionSet{emailForceTLS}, }, { Name: "Notifications: Email Auth: Identity", @@ -2497,6 +2663,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.Identity, Group: &deploymentGroupNotificationsEmailAuth, YAML: "identity", + UseInstead: serpent.OptionSet{emailAuthIdentity}, }, { Name: "Notifications: Email Auth: Username", @@ -2506,6 +2673,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.Username, Group: &deploymentGroupNotificationsEmailAuth, YAML: "username", + UseInstead: serpent.OptionSet{emailAuthUsername}, }, { Name: "Notifications: Email Auth: Password", @@ -2515,6 +2683,7 @@ Write out the current server config as YAML to stdout.`, Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"), Value: &c.Notifications.SMTP.Auth.Password, Group: 
&deploymentGroupNotificationsEmailAuth, + UseInstead: serpent.OptionSet{emailAuthPassword}, }, { Name: "Notifications: Email Auth: Password File", @@ -2524,6 +2693,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.PasswordFile, Group: &deploymentGroupNotificationsEmailAuth, YAML: "passwordFile", + UseInstead: serpent.OptionSet{emailAuthPasswordFile}, }, { Name: "Notifications: Email TLS: StartTLS", @@ -2533,6 +2703,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.StartTLS, Group: &deploymentGroupNotificationsEmailTLS, YAML: "startTLS", + UseInstead: serpent.OptionSet{emailTLSStartTLS}, }, { Name: "Notifications: Email TLS: Server Name", @@ -2542,6 +2713,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.ServerName, Group: &deploymentGroupNotificationsEmailTLS, YAML: "serverName", + UseInstead: serpent.OptionSet{emailTLSServerName}, }, { Name: "Notifications: Email TLS: Skip Certificate Verification (Insecure)", @@ -2551,6 +2723,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.InsecureSkipVerify, Group: &deploymentGroupNotificationsEmailTLS, YAML: "insecureSkipVerify", + UseInstead: serpent.OptionSet{emailTLSSkipCertVerify}, }, { Name: "Notifications: Email TLS: Certificate Authority File", @@ -2560,6 +2733,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.CAFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "caCertFile", + UseInstead: serpent.OptionSet{emailTLSCertAuthorityFile}, }, { Name: "Notifications: Email TLS: Certificate File", @@ -2569,6 +2743,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.CertFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "certFile", + UseInstead: serpent.OptionSet{emailTLSCertFile}, }, { Name: "Notifications: Email TLS: Certificate Key File", @@ 
-2578,6 +2753,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.KeyFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "certKeyFile", + UseInstead: serpent.OptionSet{emailTLSCertKeyFile}, }, { Name: "Notifications: Webhook: Endpoint", @@ -3109,9 +3285,11 @@ func (c *Client) SSHConfiguration(ctx context.Context) (SSHConfigResponse, error type CryptoKeyFeature string const ( - CryptoKeyFeatureWorkspaceApp CryptoKeyFeature = "workspace_apps" - CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" - CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" + CryptoKeyFeatureWorkspaceAppsAPIKey CryptoKeyFeature = "workspace_apps_api_key" + //nolint:gosec // This denotes a type of key, not a literal. + CryptoKeyFeatureWorkspaceAppsToken CryptoKeyFeature = "workspace_apps_token" + CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" + CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" ) type CryptoKey struct { diff --git a/codersdk/deployment_test.go b/codersdk/deployment_test.go index d7eca6323000c..61474a3b77ea1 100644 --- a/codersdk/deployment_test.go +++ b/codersdk/deployment_test.go @@ -78,6 +78,9 @@ func TestDeploymentValues_HighlyConfigurable(t *testing.T) { "Provisioner Daemon Pre-shared Key (PSK)": { yaml: true, }, + "Email Auth: Password": { + yaml: true, + }, "Notifications: Email Auth: Password": { yaml: true, }, diff --git a/codersdk/workspacesdk/connector_internal_test.go b/codersdk/workspacesdk/connector_internal_test.go index 7a339a0079ba2..19f1930c89bc5 100644 --- a/codersdk/workspacesdk/connector_internal_test.go +++ b/codersdk/workspacesdk/connector_internal_test.go @@ -25,6 +25,7 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/apiversion" "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/tailnet/proto" 
@@ -61,7 +62,7 @@ func TestTailnetAPIConnector_Disconnects(t *testing.T) { CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func([]*proto.TelemetryEvent) {}, ResumeTokenProvider: tailnet.NewInsecureTestResumeTokenProvider(), }) require.NoError(t, err) @@ -165,13 +166,17 @@ func TestTailnetAPIConnector_ResumeToken(t *testing.T) { clock := quartz.NewMock(t) resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour) + mgr := jwtutils.StaticKey{ + ID: "123", + Key: resumeTokenSigningKey[:], + } + resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour) svc, err := tailnet.NewClientService(tailnet.ClientServiceOptions{ Logger: logger, CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func([]*proto.TelemetryEvent) {}, ResumeTokenProvider: resumeTokenProvider, }) require.NoError(t, err) @@ -190,7 +195,7 @@ func TestTailnetAPIConnector_ResumeToken(t *testing.T) { t.Logf("received resume token: %s", resumeToken) assert.Equal(t, expectResumeToken, resumeToken) if resumeToken != "" { - peerID, err = resumeTokenProvider.VerifyResumeToken(resumeToken) + peerID, err = resumeTokenProvider.VerifyResumeToken(ctx, resumeToken) assert.NoError(t, err, "failed to parse resume token") if err != nil { httpapi.Write(ctx, w, http.StatusUnauthorized, codersdk.Response{ @@ -280,13 +285,17 @@ func TestTailnetAPIConnector_ResumeTokenFailure(t *testing.T) { clock := quartz.NewMock(t) resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - resumeTokenProvider := 
tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour) + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour) svc, err := tailnet.NewClientService(tailnet.ClientServiceOptions{ Logger: logger, CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func(_ []*proto.TelemetryEvent) {}, ResumeTokenProvider: resumeTokenProvider, }) require.NoError(t, err) diff --git a/docs/admin/infrastructure/architecture.md b/docs/admin/infrastructure/architecture.md index 3c4e0b1511031..fb351e4da2d18 100644 --- a/docs/admin/infrastructure/architecture.md +++ b/docs/admin/infrastructure/architecture.md @@ -10,11 +10,11 @@ page describes possible deployments, challenges, and risks associated with them. ![Architecture Diagram](../../images/architecture-diagram.png) -## Enterprise +## Premium ![Single Region Architecture Diagram](../../images/architecture-single-region.png) -## Multi-Region Enterprise +## Multi-Region Premium ![Multi Region Architecture Diagram](../../images/architecture-multi-region.png) diff --git a/docs/admin/infrastructure/scale-testing.md b/docs/admin/infrastructure/scale-testing.md index 75d3f00b35f5d..c371f23fd5559 100644 --- a/docs/admin/infrastructure/scale-testing.md +++ b/docs/admin/infrastructure/scale-testing.md @@ -173,8 +173,8 @@ example, running 10 provisioner containers will allow 10 users to start workspaces at the same time. By default, the Coder server runs 3 built-in provisioner daemons, but the -_Enterprise_ Coder release allows for running external provisioners to separate -the load caused by workspace provisioning on the `coderd` nodes. 
+_Premium_ Coder release allows for running external provisioners to separate the +load caused by workspace provisioning on the `coderd` nodes. #### Scaling formula diff --git a/docs/admin/infrastructure/validated-architectures/index.md b/docs/admin/infrastructure/validated-architectures/index.md index 85cbe430cc566..f0baa7c632b98 100644 --- a/docs/admin/infrastructure/validated-architectures/index.md +++ b/docs/admin/infrastructure/validated-architectures/index.md @@ -340,7 +340,7 @@ could affect workspace users experience once the platform is live. 1. Maintain Coder templates using [version control](../../templates/managing-templates/change-management.md). 1. Consider implementing a GitOps workflow to automatically push new template - versions into Coder from git. For example, on Github, you can use the + versions into Coder from git. For example, on GitHub, you can use the [Setup Coder](https://github.com/marketplace/actions/setup-coder) action. 1. Evaluate enabling [automatic template updates](../../templates/managing-templates/index.md#template-update-policies-enterprise-premium) diff --git a/docs/admin/integrations/opentofu.md b/docs/admin/integrations/opentofu.md index 6268a228e5d03..1867f03e8e2ed 100644 --- a/docs/admin/integrations/opentofu.md +++ b/docs/admin/integrations/opentofu.md @@ -4,7 +4,7 @@ > ⚠️ This guide is a work in progress. We do not officially support using custom > Terraform binaries in your Coder deployment. To track progress on the work, -> see this related [Github Issue](https://github.com/coder/coder/issues/12009). +> see this related [GitHub Issue](https://github.com/coder/coder/issues/12009). 
Coder deployments support any custom Terraform binary, including [OpenTofu](https://opentofu.org/docs/) - an open source alternative to diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md index 48a1d95e0b412..eabc09438d7b9 100644 --- a/docs/admin/monitoring/notifications/index.md +++ b/docs/admin/monitoring/notifications/index.md @@ -76,7 +76,7 @@ can only be delivered to one method, and this method is configured globally with [`CODER_NOTIFICATIONS_METHOD`](../../../reference/cli/server.md#--notifications-method) (default: `smtp`). -Enterprise customers can configure which method to use for each of the supported +Premium customers can configure which method to use for each of the supported [Events](#workspace-events); see the [Preferences](#delivery-preferences-enterprise-premium) section below for more details. @@ -89,34 +89,34 @@ existing one. **Server Settings:** -| Required | CLI | Env | Type | Description | Default | -| :------: | --------------------------------- | ------------------------------------- | ----------- | ----------------------------------------- | ------------- | -| ✔️ | `--notifications-email-from` | `CODER_NOTIFICATIONS_EMAIL_FROM` | `string` | The sender's address to use. | | -| ✔️ | `--notifications-email-smarthost` | `CODER_NOTIFICATIONS_EMAIL_SMARTHOST` | `host:port` | The SMTP relay to send messages through. | localhost:587 | -| ✔️ | `--notifications-email-hello` | `CODER_NOTIFICATIONS_EMAIL_HELLO` | `string` | The hostname identifying the SMTP server. | localhost | +| Required | CLI | Env | Type | Description | Default | +| :------: | ------------------- | ----------------------- | ----------- | ----------------------------------------- | ------------- | +| ✔️ | `--email-from` | `CODER_EMAIL_FROM` | `string` | The sender's address to use. | | +| ✔️ | `--email-smarthost` | `CODER_EMAIL_SMARTHOST` | `host:port` | The SMTP relay to send messages through. 
| localhost:587 | +| ✔️ | `--email-hello` | `CODER_EMAIL_HELLO` | `string` | The hostname identifying the SMTP server. | localhost | **Authentication Settings:** -| Required | CLI | Env | Type | Description | -| :------: | ------------------------------------------ | ---------------------------------------------- | -------- | ------------------------------------------------------------------------- | -| - | `--notifications-email-auth-username` | `CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME` | `string` | Username to use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-password` | `CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD` | `string` | Password to use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-password-file` | `CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE` | `string` | File from which to load password for use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-identity` | `CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY` | `string` | Identity to use with PLAIN authentication. | +| Required | CLI | Env | Type | Description | +| :------: | ---------------------------- | -------------------------------- | -------- | ------------------------------------------------------------------------- | +| - | `--email-auth-username` | `CODER_EMAIL_AUTH_USERNAME` | `string` | Username to use with PLAIN/LOGIN authentication. | +| - | `--email-auth-password` | `CODER_EMAIL_AUTH_PASSWORD` | `string` | Password to use with PLAIN/LOGIN authentication. | +| - | `--email-auth-password-file` | `CODER_EMAIL_AUTH_PASSWORD_FILE` | `string` | File from which to load password for use with PLAIN/LOGIN authentication. | +| - | `--email-auth-identity` | `CODER_EMAIL_AUTH_IDENTITY` | `string` | Identity to use with PLAIN authentication. 
| **TLS Settings:** -| Required | CLI | Env | Type | Description | Default | -| :------: | ----------------------------------------- | ------------------------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| - | `--notifications-email-force-tls` | `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` | `bool` | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. See https://datatracker.ietf.org/doc/html/rfc8314#section-3.3. | false | -| - | `--notifications-email-tls-starttls` | `CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false | -| - | `--notifications-email-tls-skip-verify` | `CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false | -| - | `--notifications-email-tls-server-name` | `CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | | -| - | `--notifications-email-tls-cert-file` | `CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. | | -| - | `--notifications-email-tls-cert-key-file` | `CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | | +| Required | CLI | Env | Type | Description | Default | +| :------: | --------------------------- | ----------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | +| - | `--email-force-tls` | `CODER_EMAIL_FORCE_TLS` | `bool` | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. 
See https://datatracker.ietf.org/doc/html/rfc8314#section-3.3. | false | +| - | `--email-tls-starttls` | `CODER_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false | +| - | `--email-tls-skip-verify` | `CODER_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false | +| - | `--email-tls-server-name` | `CODER_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | | +| - | `--email-tls-cert-file` | `CODER_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. | | +| - | `--email-tls-cert-key-file` | `CODER_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | | -**NOTE:** you _MUST_ use `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` if your smarthost -supports TLS on a port other than `465`. +**NOTE:** you _MUST_ use `CODER_EMAIL_FORCE_TLS` if your smarthost supports TLS +on a port other than `465`. ### Send emails using G-Suite @@ -126,9 +126,9 @@ After setting the required fields above: account you wish to send from 2. Set the following configuration options: ``` - CODER_NOTIFICATIONS_EMAIL_SMARTHOST=smtp.gmail.com:465 - CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME=@ - CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD="" + CODER_EMAIL_SMARTHOST=smtp.gmail.com:465 + CODER_EMAIL_AUTH_USERNAME=@ + CODER_EMAIL_AUTH_PASSWORD="" ``` See @@ -142,10 +142,10 @@ After setting the required fields above: 1. Setup an account on Microsoft 365 or outlook.com 2. 
Set the following configuration options: ``` - CODER_NOTIFICATIONS_EMAIL_SMARTHOST=smtp-mail.outlook.com:587 - CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS=true - CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME=@ - CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD="" + CODER_EMAIL_SMARTHOST=smtp-mail.outlook.com:587 + CODER_EMAIL_TLS_STARTTLS=true + CODER_EMAIL_AUTH_USERNAME=@ + CODER_EMAIL_AUTH_PASSWORD="" ``` See diff --git a/docs/admin/networking/index.md b/docs/admin/networking/index.md index d33a8534eacef..2e07a7e6e4ac8 100644 --- a/docs/admin/networking/index.md +++ b/docs/admin/networking/index.md @@ -173,7 +173,7 @@ $ coder server --derp-config-path derpmap.json The dashboard (and web apps opened through the dashboard) are served from the coder server, so they can only be geo-distributed with High Availability mode in -our Enterprise Edition. [Reach out to Sales](https://coder.com/contact) to learn +our Premium Edition. [Reach out to Sales](https://coder.com/contact) to learn more. ## Browser-only connections (enterprise) (premium) diff --git a/docs/admin/networking/port-forwarding.md b/docs/admin/networking/port-forwarding.md index a0db8715a01e7..692f933658538 100644 --- a/docs/admin/networking/port-forwarding.md +++ b/docs/admin/networking/port-forwarding.md @@ -121,7 +121,7 @@ not it is still accessible. ![Annotated port controls in the UI](../../images/networking/annotatedports.png) The sharing level is limited by the maximum level enforced in the template -settings in enterprise deployments, and not restricted in OSS deployments. +settings in premium deployments, and not restricted in OSS deployments. This can also be used to change the sharing level of `coder_app`s by entering their port number in the sharable ports UI. The `share` attribute on `coder_app` @@ -131,8 +131,8 @@ to the app. 
### Configure maximum port sharing level (enterprise) (premium) -Enterprise-licensed template admins can control the maximum port sharing level -for workspaces under a given template in the template settings. By default, the +Premium-licensed template admins can control the maximum port sharing level for +workspaces under a given template in the template settings. By default, the maximum sharing level is set to `Owner`, meaning port sharing is disabled for end-users. OSS deployments allow all workspaces to share ports at both the `authenticated` and `public` levels. diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 602710289261f..3ea4e145d13eb 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -25,7 +25,7 @@ We track the following resources: | Organization
|
FieldTracked
created_atfalse
descriptiontrue
display_nametrue
icontrue
idfalse
is_defaulttrue
nametrue
updated_attrue
| | Template
write, delete |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
user_acltrue
| | TemplateVersion
create, write |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_usernamefalse
external_auth_providersfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
template_idtrue
updated_atfalse
| -| User
create, write, delete |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
last_seen_atfalse
login_typetrue
must_reset_passwordtrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
theme_preferencefalse
updated_atfalse
usernametrue
| +| User
create, write, delete |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
theme_preferencefalse
updated_atfalse
usernametrue
| | WorkspaceBuild
start, stop |
FieldTracked
build_numberfalse
created_atfalse
daily_costfalse
deadlinefalse
idfalse
initiator_by_avatar_urlfalse
initiator_by_usernamefalse
initiator_idfalse
job_idfalse
max_deadlinefalse
provisioner_statefalse
reasonfalse
template_version_idtrue
transitionfalse
updated_atfalse
workspace_idfalse
| | WorkspaceProxy
|
FieldTracked
created_attrue
deletedfalse
derp_enabledtrue
derp_onlytrue
display_nametrue
icontrue
idtrue
nametrue
region_idtrue
token_hashed_secrettrue
updated_atfalse
urltrue
versiontrue
wildcard_hostnametrue
| | WorkspaceTable
|
FieldTracked
automatic_updatestrue
autostart_scheduletrue
created_atfalse
deletedfalse
deleting_attrue
dormant_attrue
favoritetrue
idtrue
last_used_atfalse
nametrue
organization_idfalse
owner_idtrue
template_idtrue
ttltrue
updated_atfalse
| @@ -122,5 +122,5 @@ log entry: ## Enabling this feature -This feature is only available with an enterprise license. +This feature is only available with a premium license. [Learn more](../licensing/index.md) diff --git a/docs/admin/templates/extending-templates/process-logging.md b/docs/admin/templates/extending-templates/process-logging.md index b5010f29a672b..989bdd8572ae5 100644 --- a/docs/admin/templates/extending-templates/process-logging.md +++ b/docs/admin/templates/extending-templates/process-logging.md @@ -17,7 +17,7 @@ Please note that these logs are not recorded or captured by the Coder organization in any way, shape, or form. > This is an [Premium or Enterprise](https://coder.com/pricing) feature. To -> learn more about Coder Enterprise, please +> learn more about Coder licensing, please > [contact sales](https://coder.com/contact). ## How this works diff --git a/docs/admin/templates/managing-templates/index.md b/docs/admin/templates/managing-templates/index.md index bee246b82f3d5..0abbac60487a6 100644 --- a/docs/admin/templates/managing-templates/index.md +++ b/docs/admin/templates/managing-templates/index.md @@ -60,7 +60,7 @@ infrastructure, software, or security patches. Learn more about ### Template update policies (enterprise) (premium) -Enterprise template admins may want workspaces to always remain on the latest +Licensed template admins may want workspaces to always remain on the latest version of their parent template. To do so, enable **Template Update Policies** in the template's general settings. All non-admin users of the template will be forced to update their workspaces before starting them once the setting is diff --git a/docs/admin/templates/managing-templates/schedule.md b/docs/admin/templates/managing-templates/schedule.md index b213ce9668313..4fa285dfa74f3 100644 --- a/docs/admin/templates/managing-templates/schedule.md +++ b/docs/admin/templates/managing-templates/schedule.md @@ -30,8 +30,8 @@ manage infrastructure costs. 
## Failure cleanup (enterprise) (premium) Failure cleanup defines how long a workspace is permitted to remain in the -failed state prior to being automatically stopped. Failure cleanup is an -enterprise-only feature. +failed state prior to being automatically stopped. Failure cleanup is only +available for licensed customers. ## Dormancy threshold (enterprise) (premium) @@ -41,13 +41,13 @@ by the time elapsed since a user last accessed the workspace. A workspace in the dormant state is not eligible for autostart and must be manually activated by the user before being accessible. Coder stops workspaces during their transition to the dormant state if they are detected to be running. Dormancy Threshold is -an enterprise-only feature. +only available for licensed customers. ## Dormancy auto-deletion (enterprise) (premium) Dormancy Auto-Deletion allows a template admin to dictate how long a workspace is permitted to remain dormant before it is automatically deleted. Dormancy -Auto-Deletion is an enterprise-only feature. +Auto-Deletion is only available for licensed customers. ## Autostop requirement (enterprise) (premium) diff --git a/docs/admin/templates/template-permissions.md b/docs/admin/templates/template-permissions.md index 8bb16adbd4b08..e09acdfb3124c 100644 --- a/docs/admin/templates/template-permissions.md +++ b/docs/admin/templates/template-permissions.md @@ -18,4 +18,4 @@ user can use the template to create a workspace. To prevent this, disable the ![Create Template Permissions](../../images/templates/create-template-permissions.png) -Permissions is an enterprise-only feature. +Permissions is a premium-only feature. diff --git a/docs/admin/users/groups-roles.md b/docs/admin/users/groups-roles.md index 77dd35bf9dd89..e40efb0bd5a10 100644 --- a/docs/admin/users/groups-roles.md +++ b/docs/admin/users/groups-roles.md @@ -31,6 +31,49 @@ Roles determine which actions users can take within the platform. A user may have one or more roles. 
All users have an implicit Member role that may use personal workspaces. +## Custom Roles (Premium) (Beta) + +Starting in v2.16.0, Premium Coder deployments can configure custom roles on the +[Organization](./organizations.md) level. You can create and assign custom roles +in the dashboard under **Organizations** -> **My Organization** -> **Roles**. + +> Note: This requires a Premium license. +> [Contact your account team](https://coder.com/contact) for more details. + +![Custom roles](../../images/admin/users/roles/custom-roles.PNG) + +### Example roles + +- The `Banking Compliance Auditor` custom role cannot create workspaces, but can + read template source code and view audit logs +- The `Organization Lead` role can access user workspaces for troubleshooting + purposes, but cannot edit templates +- The `Platform Member` role cannot edit or create workspaces as they are + created via a third-party system + +Custom roles can also be applied to +[headless user accounts](./headless-auth.md): + +- A `Health Check` role can view deployment status but cannot create workspaces, + manage templates, or view users +- A `CI` role can update and manage templates but cannot create workspaces or view + users + +### Creating custom roles + +Clicking "Create custom role" opens a UI to select the desired permissions for a +given persona. + +![Creating a custom role](../../images/admin/users/roles/creating-custom-role.PNG) + +From there, you can assign the custom role to any user in the organization under +the **Users** settings in the dashboard. + +![Assigning a custom role](../../images/admin/users/roles/assigning-custom-role.PNG) + +Note that these permissions only apply to the scope of an +[organization](./organizations.md), not across the deployment. 
+ ### Security notes A malicious Template Admin could write a template that executes commands on the diff --git a/docs/admin/users/index.md b/docs/admin/users/index.md index 6b500ea68ac66..a00030a514f05 100644 --- a/docs/admin/users/index.md +++ b/docs/admin/users/index.md @@ -143,7 +143,12 @@ Confirm the user activation by typing **yes** and pressing **enter**. ## Reset a password -To reset a user's via the web UI: +As of 2.17.0, users can reset their password independently on the login screen +by clicking "Forgot Password." This feature requires +[email notifications](../monitoring/notifications/index.md#smtp-email) to be +configured on the deployment. + +To reset a user's password as an administrator via the web UI: 1. Go to **Users**. 2. Find the user whose password you want to reset, click the vertical ellipsis diff --git a/docs/changelogs/v2.1.5.md b/docs/changelogs/v2.1.5.md index bb73d31f9acff..f23eff4b67b25 100644 --- a/docs/changelogs/v2.1.5.md +++ b/docs/changelogs/v2.1.5.md @@ -36,7 +36,7 @@ (@spikecurtis) - Fix null pointer on external provisioner daemons with daily_cost (#9401) (@spikecurtis) -- Hide OIDC and Github auth settings when they are disabled (#9447) (@aslilac) +- Hide OIDC and GitHub auth settings when they are disabled (#9447) (@aslilac) - Generate username with uuid to prevent collision (#9496) (@kylecarbs) - Make 'NoRefresh' honor unlimited tokens in gitauth (#9472) (@Emyrk) - Dotfiles: add an exception for `.gitconfig` (#9515) (@matifali) diff --git a/docs/images/admin/users/roles/assigning-custom-role.PNG b/docs/images/admin/users/roles/assigning-custom-role.PNG new file mode 100644 index 0000000000000..271f1bcae7781 Binary files /dev/null and b/docs/images/admin/users/roles/assigning-custom-role.PNG differ diff --git a/docs/images/admin/users/roles/creating-custom-role.PNG b/docs/images/admin/users/roles/creating-custom-role.PNG new file mode 100644 index 0000000000000..a10725f9e0a71 Binary files /dev/null and 
b/docs/images/admin/users/roles/creating-custom-role.PNG differ diff --git a/docs/images/admin/users/roles/custom-roles.PNG b/docs/images/admin/users/roles/custom-roles.PNG new file mode 100644 index 0000000000000..14c50dba7d1e7 Binary files /dev/null and b/docs/images/admin/users/roles/custom-roles.PNG differ diff --git a/docs/install/offline.md b/docs/install/offline.md index 5a06388a992ee..6a4aae1af0daa 100644 --- a/docs/install/offline.md +++ b/docs/install/offline.md @@ -54,7 +54,7 @@ RUN mkdir -p /opt/terraform # The below step is optional if you wish to keep the existing version. # See https://github.com/coder/coder/blob/main/provisioner/terraform/install.go#L23-L24 # for supported Terraform versions. -ARG TERRAFORM_VERSION=1.9.2 +ARG TERRAFORM_VERSION=1.9.8 RUN apk update && \ apk del terraform && \ curl -LOs https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip \ diff --git a/docs/install/releases.md b/docs/install/releases.md index 261d8c43dc42c..51950f9d1edc6 100644 --- a/docs/install/releases.md +++ b/docs/install/releases.md @@ -1,7 +1,7 @@ # Releases Coder releases are cut directly from main in our -[Github](https://github.com/coder/coder) on the first Tuesday of each month. +[GitHub](https://github.com/coder/coder) on the first Tuesday of each month. We recommend enterprise customers test the compatibility of new releases with their infrastructure on a staging environment before upgrading a production @@ -38,7 +38,7 @@ only for security issues or CVEs. ## Installing stable When installing Coder, we generally advise specifying the desired version from -our Github [releases page](https://github.com/coder/coder/releases). +our GitHub [releases page](https://github.com/coder/coder/releases). 
You can also use our `install.sh` script with the `stable` flag to install the latest stable release: diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index 96256b30aeed6..57ffa5260edde 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -2007,6 +2007,24 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). +## SCIM 2.0: Service Provider Config + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/scim/v2/ServiceProviderConfig + +``` + +`GET /scim/v2/ServiceProviderConfig` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ------ | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | | + ## SCIM 2.0: Get users ### Code samples @@ -2014,7 +2032,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio ```shell # Example request using curl curl -X GET http://coder-server:8080/api/v2/scim/v2/Users \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `GET /scim/v2/Users` @@ -2036,7 +2054,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio curl -X POST http://coder-server:8080/api/v2/scim/v2/Users \ -H 'Content-Type: application/json' \ -H 'Accept: application/json' \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `POST /scim/v2/Users` @@ -2118,7 +2136,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio ```shell # Example request using curl curl -X GET http://coder-server:8080/api/v2/scim/v2/Users/{id} \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `GET /scim/v2/Users/{id}` @@ -2146,7 +2164,7 @@ To perform this operation, you must be authenticated. 
[Learn more](authenticatio curl -X PATCH http://coder-server:8080/api/v2/scim/v2/Users/{id} \ -H 'Content-Type: application/json' \ -H 'Accept: application/scim+json' \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `PATCH /scim/v2/Users/{id}` diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index ed3800b3a27cd..f4e683305029b 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -1454,7 +1454,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ```json { "deletes_at": "2019-08-24T14:15:22Z", - "feature": "workspace_apps", + "feature": "workspace_apps_api_key", "secret": "string", "sequence": 0, "starts_at": "2019-08-24T14:15:22Z" @@ -1474,18 +1474,19 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ## codersdk.CryptoKeyFeature ```json -"workspace_apps" +"workspace_apps_api_key" ``` ### Properties #### Enumerated Values -| Value | -| ---------------- | -| `workspace_apps` | -| `oidc_convert` | -| `tailnet_resume` | +| Value | +| ------------------------ | +| `workspace_apps_api_key` | +| `workspace_apps_token` | +| `oidc_convert` | +| `tailnet_resume` | ## codersdk.CustomRoleRequest @@ -9893,7 +9894,7 @@ _None_ "crypto_keys": [ { "deletes_at": "2019-08-24T14:15:22Z", - "feature": "workspace_apps", + "feature": "workspace_apps_api_key", "secret": "string", "sequence": 0, "starts_at": "2019-08-24T14:15:22Z" @@ -9971,7 +9972,6 @@ _None_ ```json { - "app_security_key": "string", "derp_force_websockets": true, "derp_map": { "homeParams": { @@ -10052,7 +10052,6 @@ _None_ | Name | Type | Required | Restrictions | Description | | ----------------------- | --------------------------------------------- | -------- | ------------ | -------------------------------------------------------------------------------------- | -| `app_security_key` | string | false | | | | `derp_force_websockets` | boolean | false | | | | `derp_map` | 
[tailcfg.DERPMap](#tailcfgderpmap) | false | | | | `derp_mesh_key` | string | false | | | diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md index 17906465d2e3f..42ef7f7418b45 100644 --- a/docs/reference/cli/server.md +++ b/docs/reference/cli/server.md @@ -321,7 +321,7 @@ When collecting agent stats, aggregate metrics by a given set of comma-separated | YAML | introspection.prometheus.collect_db_metrics | | Default | false | -Collect database metrics (may increase charges for metrics storage). +Collect database query metrics (may increase charges for metrics storage). If set to false, a reduced set of database metrics are still collected. ### --pprof-enable @@ -1249,6 +1249,148 @@ Refresh interval for healthchecks. The threshold for the database health check. If the median latency of the database exceeds this threshold over 5 attempts, the database is considered unhealthy. The default value is 15ms. +### --email-from + +| | | +| ----------- | ------------------------------ | +| Type | string | +| Environment | $CODER_EMAIL_FROM | +| YAML | email.from | + +The sender's address to use. + +### --email-smarthost + +| | | +| ----------- | ----------------------------------- | +| Type | host:port | +| Environment | $CODER_EMAIL_SMARTHOST | +| YAML | email.smarthost | +| Default | localhost:587 | + +The intermediary SMTP host through which emails are sent. + +### --email-hello + +| | | +| ----------- | ------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_HELLO | +| YAML | email.hello | +| Default | localhost | + +The hostname identifying the SMTP server. + +### --email-force-tls + +| | | +| ----------- | ----------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_FORCE_TLS | +| YAML | email.forceTLS | +| Default | false | + +Force a TLS connection to the configured SMTP smarthost. 
+ +### --email-auth-identity + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_IDENTITY | +| YAML | email.emailAuth.identity | + +Identity to use with PLAIN authentication. + +### --email-auth-username + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_USERNAME | +| YAML | email.emailAuth.username | + +Username to use with PLAIN/LOGIN authentication. + +### --email-auth-password + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_PASSWORD | + +Password to use with PLAIN/LOGIN authentication. + +### --email-auth-password-file + +| | | +| ----------- | -------------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_PASSWORD_FILE | +| YAML | email.emailAuth.passwordFile | + +File from which to load password for use with PLAIN/LOGIN authentication. + +### --email-tls-starttls + +| | | +| ----------- | -------------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_TLS_STARTTLS | +| YAML | email.emailTLS.startTLS | + +Enable STARTTLS to upgrade insecure SMTP connections using TLS. + +### --email-tls-server-name + +| | | +| ----------- | ---------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_SERVERNAME | +| YAML | email.emailTLS.serverName | + +Server name to verify against the target certificate. + +### --email-tls-skip-verify + +| | | +| ----------- | ---------------------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_TLS_SKIPVERIFY | +| YAML | email.emailTLS.insecureSkipVerify | + +Skip verification of the target server's certificate (insecure). 
+ +### --email-tls-ca-cert-file + +| | | +| ----------- | ---------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CACERTFILE | +| YAML | email.emailTLS.caCertFile | + +CA certificate file to use. + +### --email-tls-cert-file + +| | | +| ----------- | -------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CERTFILE | +| YAML | email.emailTLS.certFile | + +Certificate file to use. + +### --email-tls-cert-key-file + +| | | +| ----------- | ----------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CERTKEYFILE | +| YAML | email.emailTLS.certKeyFile | + +Certificate key file to use. + ### --notifications-method | | | @@ -1288,7 +1430,6 @@ The sender's address to use. | Type | host:port | | Environment | $CODER_NOTIFICATIONS_EMAIL_SMARTHOST | | YAML | notifications.email.smarthost | -| Default | localhost:587 | The intermediary SMTP host through which emails are sent. @@ -1299,7 +1440,6 @@ The intermediary SMTP host through which emails are sent. | Type | string | | Environment | $CODER_NOTIFICATIONS_EMAIL_HELLO | | YAML | notifications.email.hello | -| Default | localhost | The hostname identifying the SMTP server. @@ -1310,7 +1450,6 @@ The hostname identifying the SMTP server. | Type | bool | | Environment | $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS | | YAML | notifications.email.forceTLS | -| Default | false | Force a TLS connection to the configured SMTP smarthost. diff --git a/docs/tutorials/example-guide.md b/docs/tutorials/example-guide.md index b0a9de5e8dafd..f60ce6972710b 100644 --- a/docs/tutorials/example-guide.md +++ b/docs/tutorials/example-guide.md @@ -1,4 +1,4 @@ -# Guide Title (Only Visible in Github) +# Guide Title (Only Visible in GitHub)
@@ -28,7 +28,7 @@ Use relative imports in the markdown and store photos in ### Setting the author data At the top of this example you will find a small html snippet that nicely -renders the author's name and photo, while linking to their Github profile. +renders the author's name and photo, while linking to their GitHub profile. Before submitting your guide in a PR, replace `your_github_handle`, `your_github_profile_photo_url` and "Your Name". The entire `` element can be omitted. diff --git a/docs/tutorials/faqs.md b/docs/tutorials/faqs.md index 96dccf8047334..b982d8bc25566 100644 --- a/docs/tutorials/faqs.md +++ b/docs/tutorials/faqs.md @@ -1,11 +1,11 @@ # FAQs -Frequently asked questions on Coder OSS and Enterprise deployments. These FAQs -come from our community and enterprise customers, feel free to +Frequently asked questions on Coder OSS and licensed deployments. These FAQs +come from our community and customers, feel free to [contribute to this page](https://github.com/coder/coder/edit/main/docs/tutorials/faqs.md). For other community resources, see our -[Github discussions](https://github.com/coder/coder/discussions), or join our +[GitHub discussions](https://github.com/coder/coder/discussions), or join our [Discord server](https://discord.gg/coder). ### How do I add a Premium trial license? @@ -291,8 +291,8 @@ tar -cvh -C ./template_1 | coder templates -d - References: -- [Public Github Issue 6117](https://github.com/coder/coder/issues/6117) -- [Public Github Issue 5677](https://github.com/coder/coder/issues/5677) +- [Public GitHub Issue 6117](https://github.com/coder/coder/issues/6117) +- [Public GitHub Issue 5677](https://github.com/coder/coder/issues/5677) - [Coder docs: Templates/Change Management](../admin/templates/managing-templates/change-management.md) ### Can I run Coder in an air-gapped or offline mode? (no Internet)? 
diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index b849120f8497e..0e75ce50ab29c 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -1,9 +1,8 @@ # Guides and Tutorials -Here you can find a list of employee-written guides on Coder for OSS and -Enterprise. These tutorials are hosted on our -[Github](https://github.com/coder/coder/) where you can leave feedback or -request new topics to be covered. +Here you can find a list of employee-written guides on Coder. These tutorials +are hosted on our [GitHub](https://github.com/coder/coder/) where you can leave +feedback or request new topics to be covered. This page is rendered on . Refer to the other documents in the `docs/tutorials/` directory for specific employee-written guides. diff --git a/docs/user-guides/workspace-access/port-forwarding.md b/docs/user-guides/workspace-access/port-forwarding.md index 9980b21455fca..969446be36836 100644 --- a/docs/user-guides/workspace-access/port-forwarding.md +++ b/docs/user-guides/workspace-access/port-forwarding.md @@ -123,7 +123,7 @@ it is still accessible. ![Annotated port controls in the UI](../../images/networking/annotatedports.png) > The sharing level is limited by the maximum level enforced in the template -> settings in enterprise deployments, and not restricted in OSS deployments. +> settings in licensed deployments, and not restricted in OSS deployments. This can also be used to change the sharing level of port-based `coder_app`s by entering their port number in the sharable ports UI. The `share` attribute on diff --git a/docs/user-guides/workspace-access/vscode.md b/docs/user-guides/workspace-access/vscode.md index 54d3fcf9c0aad..dc3cac46be0e8 100644 --- a/docs/user-guides/workspace-access/vscode.md +++ b/docs/user-guides/workspace-access/vscode.md @@ -99,7 +99,7 @@ Web or using the workspace's terminal. ```tf resource "coder_agent" "main" { ... 
- startup_script = "code-server --install-extension /vsix/Github.copilot.vsix" + startup_script = "code-server --install-extension /vsix/GitHub.copilot.vsix" } ``` @@ -130,7 +130,7 @@ Using the workspace's terminal or the terminal available inside `code-server`, you can install an extension whose files you've downloaded from a marketplace: ```console -/path/to/code-server --install-extension /vsix/Github.copilot.vsix +/path/to/code-server --install-extension /vsix/GitHub.copilot.vsix ``` ### Installing from a marketplace at the command line diff --git a/docs/user-guides/workspace-management.md b/docs/user-guides/workspace-management.md index ab55e79c2d2b4..4d4f30f2f9026 100644 --- a/docs/user-guides/workspace-management.md +++ b/docs/user-guides/workspace-management.md @@ -90,7 +90,7 @@ manually updated the workspace. ## Bulk operations (enterprise) (premium) -Enterprise admins may apply bulk operations (update, delete, start, stop) in the +Licensed admins may apply bulk operations (update, delete, start, stop) in the **Workspaces** tab. Select the workspaces you'd like to modify with the checkboxes on the left, then use the top-right **Actions** dropdown to apply the operation. diff --git a/docs/user-guides/workspace-scheduling.md b/docs/user-guides/workspace-scheduling.md index 240134c183888..322b7739def97 100644 --- a/docs/user-guides/workspace-scheduling.md +++ b/docs/user-guides/workspace-scheduling.md @@ -51,7 +51,7 @@ for your workspace. ## Autostop requirement (enterprise) (premium) -Enterprise template admins may enforce a required stop for workspaces to apply +Licensed template admins may enforce a required stop for workspaces to apply updates or undergo maintenance. These stops ignore any active connections or inactivity bumps. Rather than being specified with a CRON, admins set a frequency for updates, either in **days** or **weeks**. Workspaces will apply @@ -106,5 +106,5 @@ durations. 
Template admins configure an inactivity period after which your workspaces will gain a `dormant` badge. A separate period determines how long workspaces will remain in the dormant state before automatic deletion. -Enterprise admins may also configure failure cleanup, which will automatically +Licensed admins may also configure failure cleanup, which will automatically delete workspaces that remain in a `failed` state for too long. diff --git a/dogfood/contents/Dockerfile b/dogfood/contents/Dockerfile index 059b76dcf0d68..bef5bccbaa423 100644 --- a/dogfood/contents/Dockerfile +++ b/dogfood/contents/Dockerfile @@ -189,9 +189,9 @@ RUN apt-get update --quiet && apt-get install --yes \ # Configure FIPS-compliant policies update-crypto-policies --set FIPS -# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.9.2. +# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.9.8. # Installing the same version here to match. -RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.2/terraform_1.9.2_linux_amd64.zip" && \ +RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.8/terraform_1.9.8_linux_amd64.zip" && \ unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/dogfood/contents/main.tf b/dogfood/contents/main.tf index 5b026a46f934b..c2709e0faf6c1 100644 --- a/dogfood/contents/main.tf +++ b/dogfood/contents/main.tf @@ -275,6 +275,12 @@ resource "coder_agent" "dev" { EOT } +# Add a cost so we get some quota usage in dev.coder.com +resource "coder_metadata" "home_volume" { + resource_id = docker_volume.home_volume.id + daily_cost = 1 +} + resource "docker_volume" "home_volume" { name = "coder-${data.coder_workspace.me.id}-home" # Protect the volume from being deleted due to changes in attributes. 
diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 2de2d918dc0aa..f9e74959f2a28 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -147,7 +147,6 @@ var auditableResourcesTypes = map[any]map[string]Action{ "github_com_user_id": ActionIgnore, "hashed_one_time_passcode": ActionIgnore, "one_time_passcode_expires_at": ActionTrack, - "must_reset_password": ActionTrack, }, &database.WorkspaceTable{}: { "id": ActionTrack, diff --git a/enterprise/cli/testdata/coder_server_--help.golden b/enterprise/cli/testdata/coder_server_--help.golden index 95c0c957d80f6..a6398586fa972 100644 --- a/enterprise/cli/testdata/coder_server_--help.golden +++ b/enterprise/cli/testdata/coder_server_--help.golden @@ -107,6 +107,58 @@ Use a YAML configuration file when your server launch become unwieldy. Write out the current server config as YAML to stdout. +EMAIL OPTIONS: +Configure how emails are sent. + + --email-force-tls bool, $CODER_EMAIL_FORCE_TLS (default: false) + Force a TLS connection to the configured SMTP smarthost. + + --email-from string, $CODER_EMAIL_FROM + The sender's address to use. + + --email-hello string, $CODER_EMAIL_HELLO (default: localhost) + The hostname identifying the SMTP server. + + --email-smarthost host:port, $CODER_EMAIL_SMARTHOST (default: localhost:587) + The intermediary SMTP host through which emails are sent. + +EMAIL / EMAIL AUTHENTICATION OPTIONS: +Configure SMTP authentication options. + + --email-auth-identity string, $CODER_EMAIL_AUTH_IDENTITY + Identity to use with PLAIN authentication. + + --email-auth-password string, $CODER_EMAIL_AUTH_PASSWORD + Password to use with PLAIN/LOGIN authentication. + + --email-auth-password-file string, $CODER_EMAIL_AUTH_PASSWORD_FILE + File from which to load password for use with PLAIN/LOGIN + authentication. + + --email-auth-username string, $CODER_EMAIL_AUTH_USERNAME + Username to use with PLAIN/LOGIN authentication. 
+ +EMAIL / EMAIL TLS OPTIONS: +Configure TLS for your SMTP server target. + + --email-tls-ca-cert-file string, $CODER_EMAIL_TLS_CACERTFILE + CA certificate file to use. + + --email-tls-cert-file string, $CODER_EMAIL_TLS_CERTFILE + Certificate file to use. + + --email-tls-cert-key-file string, $CODER_EMAIL_TLS_CERTKEYFILE + Certificate key file to use. + + --email-tls-server-name string, $CODER_EMAIL_TLS_SERVERNAME + Server name to verify against the target certificate. + + --email-tls-skip-verify bool, $CODER_EMAIL_TLS_SKIPVERIFY + Skip verification of the target server's certificate (insecure). + + --email-tls-starttls bool, $CODER_EMAIL_TLS_STARTTLS + Enable STARTTLS to upgrade insecure SMTP connections using TLS. + INTROSPECTION / HEALTH CHECK OPTIONS: --health-check-refresh duration, $CODER_HEALTH_CHECK_REFRESH (default: 10m0s) Refresh interval for healthchecks. @@ -146,7 +198,9 @@ INTROSPECTION / PROMETHEUS OPTIONS: Collect agent stats (may increase charges for metrics storage). --prometheus-collect-db-metrics bool, $CODER_PROMETHEUS_COLLECT_DB_METRICS (default: false) - Collect database metrics (may increase charges for metrics storage). + Collect database query metrics (may increase charges for metrics + storage). If set to false, a reduced set of database metrics are still + collected. --prometheus-enable bool, $CODER_PROMETHEUS_ENABLE Serve prometheus metrics on the address defined by prometheus address. @@ -348,54 +402,68 @@ Configure how notifications are processed and delivered. NOTIFICATIONS / EMAIL OPTIONS: Configure how email notifications are sent. - --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS (default: false) + --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS Force a TLS connection to the configured SMTP smarthost. + DEPRECATED: Use --email-force-tls instead. --notifications-email-from string, $CODER_NOTIFICATIONS_EMAIL_FROM The sender's address to use. + DEPRECATED: Use --email-from instead. 
- --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO (default: localhost) + --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO The hostname identifying the SMTP server. + DEPRECATED: Use --email-hello instead. - --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST (default: localhost:587) + --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST The intermediary SMTP host through which emails are sent. + DEPRECATED: Use --email-smarthost instead. NOTIFICATIONS / EMAIL / EMAIL AUTHENTICATION OPTIONS: Configure SMTP authentication options. --notifications-email-auth-identity string, $CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY Identity to use with PLAIN authentication. + DEPRECATED: Use --email-auth-identity instead. --notifications-email-auth-password string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD Password to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password instead. --notifications-email-auth-password-file string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE File from which to load password for use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password-file instead. --notifications-email-auth-username string, $CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME Username to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-username instead. NOTIFICATIONS / EMAIL / EMAIL TLS OPTIONS: Configure TLS for your SMTP server target. --notifications-email-tls-ca-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CACERTFILE CA certificate file to use. + DEPRECATED: Use --email-tls-ca-cert-file instead. --notifications-email-tls-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE Certificate file to use. + DEPRECATED: Use --email-tls-cert-file instead. --notifications-email-tls-cert-key-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE Certificate key file to use. + DEPRECATED: Use --email-tls-cert-key-file instead. 
--notifications-email-tls-server-name string, $CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME Server name to verify against the target certificate. + DEPRECATED: Use --email-tls-server-name instead. --notifications-email-tls-skip-verify bool, $CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY Skip verification of the target server's certificate (insecure). + DEPRECATED: Use --email-tls-skip-verify instead. --notifications-email-tls-starttls bool, $CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS Enable STARTTLS to upgrade insecure SMTP connections using TLS. + DEPRECATED: Use --email-tls-starttls instead. NOTIFICATIONS / WEBHOOK OPTIONS: --notifications-webhook-endpoint url, $CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 79453d617ed6e..7e59eb341411f 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -455,8 +455,9 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { if len(options.SCIMAPIKey) != 0 { api.AGPL.RootHandler.Route("/scim/v2", func(r chi.Router) { r.Use( - api.scimEnabledMW, + api.RequireFeatureMW(codersdk.FeatureSCIM), ) + r.Get("/ServiceProviderConfig", api.scimServiceProviderConfig) r.Post("/Users", api.scimPostUser) r.Route("/Users", func(r chi.Router) { r.Get("/", api.scimGetUsers) @@ -464,6 +465,13 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { r.Get("/{id}", api.scimGetUser) r.Patch("/{id}", api.scimPatchUser) }) + r.NotFound(func(w http.ResponseWriter, r *http.Request) { + u := r.URL.String() + httpapi.Write(r.Context(), w, http.StatusNotFound, codersdk.Response{ + Message: fmt.Sprintf("SCIM endpoint %s not found", u), + Detail: "This endpoint is not implemented. If it is correct and required, please contact support.", + }) + }) }) } else { // Show a helpful 404 error. 
Because this is not under the /api/v2 routes, diff --git a/enterprise/coderd/coderdenttest/proxytest.go b/enterprise/coderd/coderdenttest/proxytest.go index 6e5a822bdf251..a6f2c7384b16f 100644 --- a/enterprise/coderd/coderdenttest/proxytest.go +++ b/enterprise/coderd/coderdenttest/proxytest.go @@ -65,6 +65,8 @@ type WorkspaceProxy struct { // owner client. If a token is provided, the proxy will become a replica of the // existing proxy region. func NewWorkspaceProxyReplica(t *testing.T, coderdAPI *coderd.API, owner *codersdk.Client, options *ProxyOptions) WorkspaceProxy { + t.Helper() + ctx, cancelFunc := context.WithCancel(context.Background()) t.Cleanup(cancelFunc) @@ -142,8 +144,10 @@ func NewWorkspaceProxyReplica(t *testing.T, coderdAPI *coderd.API, owner *coders statsCollectorOptions.Flush = options.FlushStats } + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug).With(slog.F("server_url", serverURL.String())) + wssrv, err := wsproxy.New(ctx, &wsproxy.Options{ - Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug).With(slog.F("server_url", serverURL.String())), + Logger: logger, Experiments: options.Experiments, DashboardURL: coderdAPI.AccessURL, AccessURL: accessURL, diff --git a/enterprise/coderd/scim.go b/enterprise/coderd/scim.go index 45390b6014a6a..439e6ca3225de 100644 --- a/enterprise/coderd/scim.go +++ b/enterprise/coderd/scim.go @@ -5,6 +5,7 @@ import ( "database/sql" "encoding/json" "net/http" + "time" "github.com/go-chi/chi/v5" "github.com/google/uuid" @@ -21,32 +22,90 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/scim" ) -func (api *API) scimEnabledMW(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if !api.Entitlements.Enabled(codersdk.FeatureSCIM) { - httpapi.RouteNotFound(rw) - return - } - - next.ServeHTTP(rw, r) - }) -} - func (api 
*API) scimVerifyAuthHeader(r *http.Request) bool { + bearer := []byte("Bearer ") hdr := []byte(r.Header.Get("Authorization")) + if len(hdr) >= len(bearer) && subtle.ConstantTimeCompare(hdr[:len(bearer)], bearer) == 1 { + hdr = hdr[len(bearer):] + } + return len(api.SCIMAPIKey) != 0 && subtle.ConstantTimeCompare(hdr, api.SCIMAPIKey) == 1 } +// scimServiceProviderConfig returns a static SCIM service provider configuration. +// +// @Summary SCIM 2.0: Service Provider Config +// @ID scim-get-service-provider-config +// @Produce application/scim+json +// @Tags Enterprise +// @Success 200 +// @Router /scim/v2/ServiceProviderConfig [get] +func (api *API) scimServiceProviderConfig(rw http.ResponseWriter, _ *http.Request) { + // No auth needed to query this endpoint. + + rw.Header().Set("Content-Type", spec.ApplicationScimJson) + rw.WriteHeader(http.StatusOK) + + // providerUpdated is the last time the static provider config was updated. + // Increment this time if you make any changes to the provider config. 
+ providerUpdated := time.Date(2024, 10, 25, 17, 0, 0, 0, time.UTC) + var location string + locURL, err := api.AccessURL.Parse("/scim/v2/ServiceProviderConfig") + if err == nil { + location = locURL.String() + } + + enc := json.NewEncoder(rw) + enc.SetEscapeHTML(true) + _ = enc.Encode(scim.ServiceProviderConfig{ + Schemas: []string{"urn:ietf:params:scim:schemas:core:2.0:ServiceProviderConfig"}, + DocURI: "https://coder.com/docs/admin/users/oidc-auth#scim-enterprise-premium", + Patch: scim.Supported{ + Supported: true, + }, + Bulk: scim.BulkSupported{ + Supported: false, + }, + Filter: scim.FilterSupported{ + Supported: false, + }, + ChangePassword: scim.Supported{ + Supported: false, + }, + Sort: scim.Supported{ + Supported: false, + }, + ETag: scim.Supported{ + Supported: false, + }, + AuthSchemes: []scim.AuthenticationScheme{ + { + Type: "oauthbearertoken", + Name: "HTTP Header Authentication", + Description: "Authentication scheme using the Authorization header with the shared token", + DocURI: "https://coder.com/docs/admin/users/oidc-auth#scim-enterprise-premium", + }, + }, + Meta: scim.ServiceProviderMeta{ + Created: providerUpdated, + LastModified: providerUpdated, + Location: location, + ResourceType: "ServiceProviderConfig", + }, + }) +} + // scimGetUsers intentionally always returns no users. This is done to always force // Okta to try and create each user individually, this way we don't need to // implement fetching users twice. 
// // @Summary SCIM 2.0: Get users // @ID scim-get-users -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Success 200 @@ -73,7 +132,7 @@ func (api *API) scimGetUsers(rw http.ResponseWriter, r *http.Request) { // // @Summary SCIM 2.0: Get user by ID // @ID scim-get-user-by-id -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Param id path string true "User ID" format(uuid) @@ -124,7 +183,7 @@ var SCIMAuditAdditionalFields = map[string]string{ // // @Summary SCIM 2.0: Create new user // @ID scim-create-new-user -// @Security CoderSessionToken +// @Security Authorization // @Produce json // @Tags Enterprise // @Param request body coderd.SCIMUser true "New user" @@ -260,7 +319,7 @@ func (api *API) scimPostUser(rw http.ResponseWriter, r *http.Request) { // // @Summary SCIM 2.0: Update user account // @ID scim-update-user-status -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Param id path string true "User ID" format(uuid) diff --git a/enterprise/coderd/scim/scimtypes.go b/enterprise/coderd/scim/scimtypes.go new file mode 100644 index 0000000000000..e78b70b3e9f3f --- /dev/null +++ b/enterprise/coderd/scim/scimtypes.go @@ -0,0 +1,46 @@ +package scim + +import "time" + +type ServiceProviderConfig struct { + Schemas []string `json:"schemas"` + DocURI string `json:"documentationUri"` + Patch Supported `json:"patch"` + Bulk BulkSupported `json:"bulk"` + Filter FilterSupported `json:"filter"` + ChangePassword Supported `json:"changePassword"` + Sort Supported `json:"sort"` + ETag Supported `json:"etag"` + AuthSchemes []AuthenticationScheme `json:"authenticationSchemes"` + Meta ServiceProviderMeta `json:"meta"` +} + +type ServiceProviderMeta struct { + Created time.Time `json:"created"` + LastModified time.Time `json:"lastModified"` + Location string `json:"location"` + 
ResourceType string `json:"resourceType"` +} + +type Supported struct { + Supported bool `json:"supported"` +} + +type BulkSupported struct { + Supported bool `json:"supported"` + MaxOp int `json:"maxOperations"` + MaxPayload int `json:"maxPayloadSize"` +} + +type FilterSupported struct { + Supported bool `json:"supported"` + MaxResults int `json:"maxResults"` +} + +type AuthenticationScheme struct { + Type string `json:"type"` + Name string `json:"name"` + Description string `json:"description"` + SpecURI string `json:"specUri"` + DocURI string `json:"documentationUri"` +} diff --git a/enterprise/coderd/scim_test.go b/enterprise/coderd/scim_test.go index 8d65d9bb34531..82355c3a3b9c0 100644 --- a/enterprise/coderd/scim_test.go +++ b/enterprise/coderd/scim_test.go @@ -56,6 +56,12 @@ func setScimAuth(key []byte) func(*http.Request) { } } +func setScimAuthBearer(key []byte) func(*http.Request) { + return func(r *http.Request) { + r.Header.Set("Authorization", "Bearer "+string(key)) + } +} + //nolint:gocritic // SCIM authenticates via a special header and bypasses internal RBAC. 
func TestScim(t *testing.T) { t.Parallel() @@ -82,7 +88,7 @@ func TestScim(t *testing.T) { res, err := client.Request(ctx, "POST", "/scim/v2/Users", struct{}{}) require.NoError(t, err) defer res.Body.Close() - assert.Equal(t, http.StatusNotFound, res.StatusCode) + assert.Equal(t, http.StatusForbidden, res.StatusCode) }) t.Run("noAuth", func(t *testing.T) { @@ -134,9 +140,71 @@ func TestScim(t *testing.T) { }) mockAudit.ResetLogs() + // verify scim is enabled + res, err := client.Request(ctx, http.MethodGet, "/scim/v2/ServiceProviderConfig", nil) + require.NoError(t, err) + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + // when sUser := makeScimUser(t) - res, err := client.Request(ctx, "POST", "/scim/v2/Users", sUser, setScimAuth(scimAPIKey)) + res, err = client.Request(ctx, http.MethodPost, "/scim/v2/Users", sUser, setScimAuth(scimAPIKey)) + require.NoError(t, err) + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + + // then + // Expect audit logs + aLogs := mockAudit.AuditLogs() + require.Len(t, aLogs, 1) + af := map[string]string{} + err = json.Unmarshal([]byte(aLogs[0].AdditionalFields), &af) + require.NoError(t, err) + assert.Equal(t, coderd.SCIMAuditAdditionalFields, af) + assert.Equal(t, database.AuditActionCreate, aLogs[0].Action) + + // Expect users exposed over API + userRes, err := client.Users(ctx, codersdk.UsersRequest{Search: sUser.Emails[0].Value}) + require.NoError(t, err) + require.Len(t, userRes.Users, 1) + assert.Equal(t, sUser.Emails[0].Value, userRes.Users[0].Email) + assert.Equal(t, sUser.UserName, userRes.Users[0].Username) + assert.Len(t, userRes.Users[0].OrganizationIDs, 1) + + // Expect zero notifications (SkipNotifications = true) + require.Empty(t, notifyEnq.Sent) + }) + + t.Run("OK_Bearer", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + // given + scimAPIKey := []byte("hi") + mockAudit := 
audit.NewMock() + notifyEnq := &testutil.FakeNotificationsEnqueuer{} + client, _ := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Auditor: mockAudit, + NotificationsEnqueuer: notifyEnq, + }, + SCIMAPIKey: scimAPIKey, + AuditLogging: true, + LicenseOptions: &coderdenttest.LicenseOptions{ + AccountID: "coolin", + Features: license.Features{ + codersdk.FeatureSCIM: 1, + codersdk.FeatureAuditLog: 1, + }, + }, + }) + mockAudit.ResetLogs() + + // when + sUser := makeScimUser(t) + res, err := client.Request(ctx, "POST", "/scim/v2/Users", sUser, setScimAuthBearer(scimAPIKey)) require.NoError(t, err) defer res.Body.Close() require.Equal(t, http.StatusOK, res.StatusCode) @@ -362,7 +430,7 @@ func TestScim(t *testing.T) { require.NoError(t, err) _, _ = io.Copy(io.Discard, res.Body) _ = res.Body.Close() - assert.Equal(t, http.StatusNotFound, res.StatusCode) + assert.Equal(t, http.StatusForbidden, res.StatusCode) }) t.Run("noAuth", func(t *testing.T) { diff --git a/enterprise/coderd/workspaceproxy.go b/enterprise/coderd/workspaceproxy.go index 47bdf53493489..4008de69e4faa 100644 --- a/enterprise/coderd/workspaceproxy.go +++ b/enterprise/coderd/workspaceproxy.go @@ -7,6 +7,7 @@ import ( "fmt" "net/http" "net/url" + "slices" "strings" "time" @@ -33,6 +34,13 @@ import ( "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" ) +// whitelistedCryptoKeyFeatures is a list of crypto key features that are +// allowed to be queried with workspace proxies. +var whitelistedCryptoKeyFeatures = []database.CryptoKeyFeature{ + database.CryptoKeyFeatureWorkspaceAppsToken, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, +} + // forceWorkspaceProxyHealthUpdate forces an update of the proxy health. // This is useful when a proxy is created or deleted. Errors will be logged. 
func (api *API) forceWorkspaceProxyHealthUpdate(ctx context.Context) { @@ -700,7 +708,6 @@ func (api *API) workspaceProxyRegister(rw http.ResponseWriter, r *http.Request) } httpapi.Write(ctx, rw, http.StatusCreated, wsproxysdk.RegisterWorkspaceProxyResponse{ - AppSecurityKey: api.AppSecurityKey.String(), DERPMeshKey: api.DERPServer.MeshKey(), DERPRegionID: regionID, DERPMap: api.AGPL.DERPMap(), @@ -721,13 +728,29 @@ func (api *API) workspaceProxyRegister(rw http.ResponseWriter, r *http.Request) // @Security CoderSessionToken // @Produce json // @Tags Enterprise +// @Param feature query string true "Feature key" // @Success 200 {object} wsproxysdk.CryptoKeysResponse // @Router /workspaceproxies/me/crypto-keys [get] // @x-apidocgen {"skip": true} func (api *API) workspaceProxyCryptoKeys(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - keys, err := api.Database.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + feature := database.CryptoKeyFeature(r.URL.Query().Get("feature")) + if feature == "" { + httpapi.Write(r.Context(), rw, http.StatusBadRequest, codersdk.Response{ + Message: "Missing feature query parameter.", + }) + return + } + + if !slices.Contains(whitelistedCryptoKeyFeatures, feature) { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Invalid feature: %q", feature), + }) + return + } + + keys, err := api.Database.GetCryptoKeysByFeature(ctx, feature) if err != nil { httpapi.InternalServerError(rw, err) return diff --git a/enterprise/coderd/workspaceproxy_test.go b/enterprise/coderd/workspaceproxy_test.go index 5231a0b0c4241..0be112b532b7a 100644 --- a/enterprise/coderd/workspaceproxy_test.go +++ b/enterprise/coderd/workspaceproxy_test.go @@ -320,7 +320,6 @@ func TestProxyRegisterDeregister(t *testing.T) { } registerRes1, err := proxyClient.RegisterWorkspaceProxy(ctx, req) require.NoError(t, err) - require.NotEmpty(t, registerRes1.AppSecurityKey) require.NotEmpty(t, 
registerRes1.DERPMeshKey) require.EqualValues(t, 10001, registerRes1.DERPRegionID) require.Empty(t, registerRes1.SiblingReplicas) @@ -609,11 +608,8 @@ func TestProxyRegisterDeregister(t *testing.T) { func TestIssueSignedAppToken(t *testing.T) { t.Parallel() - db, pubsub := dbtestutil.NewDB(t) client, user := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ - Database: db, - Pubsub: pubsub, IncludeProvisionerDaemon: true, }, LicenseOptions: &coderdenttest.LicenseOptions{ @@ -716,6 +712,10 @@ func TestReconnectingPTYSignedToken(t *testing.T) { closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, + }) + // Create a workspace + apps authToken := uuid.NewString() version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ @@ -915,51 +915,86 @@ func TestGetCryptoKeys(t *testing.T) { now := time.Now() expectedKey1 := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-time.Hour), Sequence: 2, }) - key1 := db2sdk.CryptoKey(expectedKey1) + encryptionKey := db2sdk.CryptoKey(expectedKey1) expectedKey2 := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, StartsAt: now, Sequence: 3, }) - key2 := db2sdk.CryptoKey(expectedKey2) + signingKey := db2sdk.CryptoKey(expectedKey2) // Create a deleted key. _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-time.Hour), Secret: sql.NullString{ String: "secret1", Valid: false, }, - Sequence: 1, - }) - - // Create a key with different features. 
- _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureTailnetResume, - StartsAt: now.Add(-time.Hour), - Sequence: 1, - }) - _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, - StartsAt: now.Add(-time.Hour), - Sequence: 1, + Sequence: 4, }) proxy := coderdenttest.NewWorkspaceProxyReplica(t, api, cclient, &coderdenttest.ProxyOptions{ Name: testutil.GetRandomName(t), }) - keys, err := proxy.SDKClient.CryptoKeys(ctx) + keys, err := proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.NotEmpty(t, keys) + // 1 key is generated on startup, the other we manually generated. require.Equal(t, 2, len(keys.CryptoKeys)) - requireContainsKeys(t, keys.CryptoKeys, key1, key2) + requireContainsKeys(t, keys.CryptoKeys, encryptionKey) + requireNotContainsKeys(t, keys.CryptoKeys, signingKey) + + keys, err = proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsToken) + require.NoError(t, err) + require.NotEmpty(t, keys) + // 1 key is generated on startup, the other we manually generated. 
+ require.Equal(t, 2, len(keys.CryptoKeys)) + requireContainsKeys(t, keys.CryptoKeys, signingKey) + requireNotContainsKeys(t, keys.CryptoKeys, encryptionKey) + }) + + t.Run("InvalidFeature", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitMedium) + db, pubsub := dbtestutil.NewDB(t) + cclient, _, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: pubsub, + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceProxy: 1, + }, + }, + }) + + proxy := coderdenttest.NewWorkspaceProxyReplica(t, api, cclient, &coderdenttest.ProxyOptions{ + Name: testutil.GetRandomName(t), + }) + + _, err := proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureOIDCConvert) + require.Error(t, err) + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + _, err = proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureTailnetResume) + require.Error(t, err) + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + _, err = proxy.SDKClient.CryptoKeys(ctx, "invalid") + require.Error(t, err) + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) }) t.Run("Unauthorized", func(t *testing.T) { @@ -987,7 +1022,7 @@ func TestGetCryptoKeys(t *testing.T) { client := wsproxysdk.New(cclient.URL) client.SetSessionToken(cclient.SessionToken()) - _, err := client.CryptoKeys(ctx) + _, err := client.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.Error(t, err) var sdkErr *codersdk.Error require.ErrorAs(t, err, &sdkErr) @@ -995,6 +1030,18 @@ func TestGetCryptoKeys(t *testing.T) { }) } +func requireNotContainsKeys(t *testing.T, keys []codersdk.CryptoKey, unexpected ...codersdk.CryptoKey) { + t.Helper() + + for _, unexpectedKey := range unexpected 
{ + for _, key := range keys { + if key.Feature == unexpectedKey.Feature && key.Sequence == unexpectedKey.Sequence { + t.Fatalf("unexpected key %+v found", unexpectedKey) + } + } + } +} + func requireContainsKeys(t *testing.T, keys []codersdk.CryptoKey, expected ...codersdk.CryptoKey) { t.Helper() diff --git a/enterprise/coderd/workspacequota.go b/enterprise/coderd/workspacequota.go index 8178f6304a947..7ea42ea24f491 100644 --- a/enterprise/coderd/workspacequota.go +++ b/enterprise/coderd/workspacequota.go @@ -104,8 +104,9 @@ func (c *committer) CommitQuota( permit = true consumed = newConsumed return nil - }, &sql.TxOptions{ - Isolation: sql.LevelSerializable, + }, &database.TxOptions{ + Isolation: sql.LevelSerializable, + TxIdentifier: "commit_quota", }) if err != nil { return nil, err diff --git a/enterprise/dbcrypt/cliutil.go b/enterprise/dbcrypt/cliutil.go index 4d8e7e7b0340f..47045f9bfefab 100644 --- a/enterprise/dbcrypt/cliutil.go +++ b/enterprise/dbcrypt/cliutil.go @@ -73,7 +73,7 @@ func Rotate(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciphe } } return nil - }, &sql.TxOptions{ + }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, }) if err != nil { @@ -163,7 +163,7 @@ func Decrypt(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciph } } return nil - }, &sql.TxOptions{ + }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, }) if err != nil { diff --git a/enterprise/dbcrypt/dbcrypt.go b/enterprise/dbcrypt/dbcrypt.go index 979a8ad137e6d..77a7d5cb78738 100644 --- a/enterprise/dbcrypt/dbcrypt.go +++ b/enterprise/dbcrypt/dbcrypt.go @@ -60,7 +60,7 @@ type dbCrypt struct { database.Store } -func (db *dbCrypt) InTx(function func(database.Store) error, txOpts *sql.TxOptions) error { +func (db *dbCrypt) InTx(function func(database.Store) error, txOpts *database.TxOptions) error { return db.Store.InTx(func(s database.Store) error { return function(&dbCrypt{ primaryCipherDigest: db.primaryCipherDigest, @@ -445,5 +445,5 
@@ func (db *dbCrypt) ensureEncrypted(ctx context.Context) error { ActiveKeyDigest: db.primaryCipherDigest, Test: testValue, }) - }, &sql.TxOptions{Isolation: sql.LevelRepeatableRead}) + }, &database.TxOptions{Isolation: sql.LevelRepeatableRead}) } diff --git a/enterprise/dbcrypt/dbcrypt_internal_test.go b/enterprise/dbcrypt/dbcrypt_internal_test.go index 432dc90061677..8800180493d12 100644 --- a/enterprise/dbcrypt/dbcrypt_internal_test.go +++ b/enterprise/dbcrypt/dbcrypt_internal_test.go @@ -397,12 +397,12 @@ func TestCryptoKeys(t *testing.T) { _ = dbgen.CryptoKey(t, crypt, database.CryptoKey{ Secret: sql.NullString{String: "test", Valid: true}, }) - key, err := crypt.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + key, err := crypt.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.Equal(t, "test", key.Secret.String) require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) - key, err = db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + key, err = db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) requireEncryptedEquals(t, ciphers[0], key.Secret.String, "test") require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) @@ -415,7 +415,7 @@ func TestCryptoKeys(t *testing.T) { Secret: sql.NullString{String: "test", Valid: true}, }) key, err := crypt.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: key.Sequence, }) require.NoError(t, err) @@ -423,7 +423,7 @@ func TestCryptoKeys(t *testing.T) { require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) key, err = db.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: 
database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: key.Sequence, }) require.NoError(t, err) @@ -459,7 +459,7 @@ func TestCryptoKeys(t *testing.T) { Secret: sql.NullString{String: "test", Valid: true}, }) _ = dbgen.CryptoKey(t, crypt, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 43, }) keys, err := crypt.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeatureTailnetResume) @@ -773,7 +773,7 @@ func TestEncryptDecryptField(t *testing.T) { func expectInTx(mdb *dbmock.MockStore) *gomock.Call { return mdb.EXPECT().InTx(gomock.Any(), gomock.Any()).Times(1).DoAndReturn( - func(f func(store database.Store) error, _ *sql.TxOptions) error { + func(f func(store database.Store) error, _ *database.TxOptions) error { return f(mdb) }, ) diff --git a/enterprise/workspaceapps_test.go b/enterprise/workspaceapps_test.go index f4ba577f13e33..51d0314c45767 100644 --- a/enterprise/workspaceapps_test.go +++ b/enterprise/workspaceapps_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/workspaceapps/apptest" "github.com/coder/coder/v2/codersdk" @@ -36,6 +37,9 @@ func TestWorkspaceApps(t *testing.T) { flushStatsCollectorCh <- flushStatsCollectorDone <-flushStatsCollectorDone } + + db, pubsub := dbtestutil.NewDB(t) + client, _, _, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -51,6 +55,8 @@ func TestWorkspaceApps(t *testing.T) { }, }, WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ diff --git a/enterprise/wsproxy/keyfetcher.go b/enterprise/wsproxy/keyfetcher.go index f30fffb2cd093..1a1745d6ccd2d 100644 --- 
a/enterprise/wsproxy/keyfetcher.go +++ b/enterprise/wsproxy/keyfetcher.go @@ -13,12 +13,11 @@ import ( var _ cryptokeys.Fetcher = &ProxyFetcher{} type ProxyFetcher struct { - Client *wsproxysdk.Client - Feature codersdk.CryptoKeyFeature + Client *wsproxysdk.Client } -func (p *ProxyFetcher) Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) { - keys, err := p.Client.CryptoKeys(ctx) +func (p *ProxyFetcher) Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { + keys, err := p.Client.CryptoKeys(ctx, feature) if err != nil { return nil, xerrors.Errorf("crypto keys: %w", err) } diff --git a/enterprise/wsproxy/tokenprovider.go b/enterprise/wsproxy/tokenprovider.go index 38822a4e7a22d..5093c6015725e 100644 --- a/enterprise/wsproxy/tokenprovider.go +++ b/enterprise/wsproxy/tokenprovider.go @@ -7,6 +7,8 @@ import ( "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" ) @@ -18,18 +20,19 @@ type TokenProvider struct { AccessURL *url.URL AppHostname string - Client *wsproxysdk.Client - SecurityKey workspaceapps.SecurityKey - Logger slog.Logger + Client *wsproxysdk.Client + TokenSigningKeycache cryptokeys.SigningKeycache + APIKeyEncryptionKeycache cryptokeys.EncryptionKeycache + Logger slog.Logger } func (p *TokenProvider) FromRequest(r *http.Request) (*workspaceapps.SignedToken, bool) { - return workspaceapps.FromRequest(r, p.SecurityKey) + return workspaceapps.FromRequest(r, p.TokenSigningKeycache) } func (p *TokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *http.Request, issueReq workspaceapps.IssueTokenRequest) (*workspaceapps.SignedToken, string, bool) { appReq := issueReq.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { workspaceapps.WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, 
"invalid app request") return nil, "", false @@ -42,7 +45,8 @@ func (p *TokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *ht } // Check that it verifies properly and matches the string. - token, err := p.SecurityKey.VerifySignedToken(resp.SignedTokenStr) + var token workspaceapps.SignedToken + err = jwtutils.Verify(ctx, p.TokenSigningKeycache, resp.SignedTokenStr, &token) if err != nil { workspaceapps.WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "failed to verify newly generated signed token") return nil, "", false diff --git a/enterprise/wsproxy/wsproxy.go b/enterprise/wsproxy/wsproxy.go index 2a7e9e81e0cda..fe900fa433530 100644 --- a/enterprise/wsproxy/wsproxy.go +++ b/enterprise/wsproxy/wsproxy.go @@ -31,6 +31,7 @@ import ( "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/cli/cliutil" "github.com/coder/coder/v2/coderd" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/tracing" @@ -130,6 +131,13 @@ type Server struct { // the moon's token. SDKClient *wsproxysdk.Client + // apiKeyEncryptionKeycache manages the encryption keys for smuggling API + // tokens to the alternate domain when using workspace apps. + apiKeyEncryptionKeycache cryptokeys.EncryptionKeycache + // appTokenSigningKeycache manages the signing keys for signing the app + // tokens we use for workspace apps. 
+ appTokenSigningKeycache cryptokeys.SigningKeycache + // DERP derpMesh *derpmesh.Mesh derpMeshTLSConfig *tls.Config @@ -195,19 +203,42 @@ func New(ctx context.Context, opts *Options) (*Server, error) { derpServer := derp.NewServer(key.NewNode(), tailnet.Logger(opts.Logger.Named("net.derp"))) ctx, cancel := context.WithCancel(context.Background()) + + encryptionCache, err := cryptokeys.NewEncryptionCache(ctx, + opts.Logger, + &ProxyFetcher{Client: client}, + codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey, + ) + if err != nil { + cancel() + return nil, xerrors.Errorf("create api key encryption cache: %w", err) + } + signingCache, err := cryptokeys.NewSigningCache(ctx, + opts.Logger, + &ProxyFetcher{Client: client}, + codersdk.CryptoKeyFeatureWorkspaceAppsToken, + ) + if err != nil { + cancel() + return nil, xerrors.Errorf("create api token signing cache: %w", err) + } + r := chi.NewRouter() s := &Server{ - Options: opts, - Handler: r, - DashboardURL: opts.DashboardURL, - Logger: opts.Logger.Named("net.workspace-proxy"), - TracerProvider: opts.Tracing, - PrometheusRegistry: opts.PrometheusRegistry, - SDKClient: client, - derpMesh: derpmesh.New(opts.Logger.Named("net.derpmesh"), derpServer, meshTLSConfig), - derpMeshTLSConfig: meshTLSConfig, - ctx: ctx, - cancel: cancel, + ctx: ctx, + cancel: cancel, + + Options: opts, + Handler: r, + DashboardURL: opts.DashboardURL, + Logger: opts.Logger.Named("net.workspace-proxy"), + TracerProvider: opts.Tracing, + PrometheusRegistry: opts.PrometheusRegistry, + SDKClient: client, + derpMesh: derpmesh.New(opts.Logger.Named("net.derpmesh"), derpServer, meshTLSConfig), + derpMeshTLSConfig: meshTLSConfig, + apiKeyEncryptionKeycache: encryptionCache, + appTokenSigningKeycache: signingCache, } // Register the workspace proxy with the primary coderd instance and start a @@ -240,11 +271,6 @@ func New(ctx context.Context, opts *Options) (*Server, error) { return nil, xerrors.Errorf("handle register: %w", err) } - secKey, err := 
workspaceapps.KeyFromString(regResp.AppSecurityKey) - if err != nil { - return nil, xerrors.Errorf("parse app security key: %w", err) - } - agentProvider, err := coderd.NewServerTailnet(ctx, s.Logger, nil, @@ -277,20 +303,21 @@ func New(ctx context.Context, opts *Options) (*Server, error) { HostnameRegex: opts.AppHostnameRegex, RealIPConfig: opts.RealIPConfig, SignedTokenProvider: &TokenProvider{ - DashboardURL: opts.DashboardURL, - AccessURL: opts.AccessURL, - AppHostname: opts.AppHostname, - Client: client, - SecurityKey: secKey, - Logger: s.Logger.Named("proxy_token_provider"), + DashboardURL: opts.DashboardURL, + AccessURL: opts.AccessURL, + AppHostname: opts.AppHostname, + Client: client, + TokenSigningKeycache: signingCache, + APIKeyEncryptionKeycache: encryptionCache, + Logger: s.Logger.Named("proxy_token_provider"), }, - AppSecurityKey: secKey, DisablePathApps: opts.DisablePathApps, SecureAuthCookie: opts.SecureAuthCookie, - AgentProvider: agentProvider, - StatsCollector: workspaceapps.NewStatsCollector(opts.StatsCollectorOptions), + AgentProvider: agentProvider, + StatsCollector: workspaceapps.NewStatsCollector(opts.StatsCollectorOptions), + APIKeyEncryptionKeycache: encryptionCache, } derpHandler := derphttp.Handler(derpServer) @@ -435,6 +462,8 @@ func (s *Server) Close() error { err = multierror.Append(err, agentProviderErr) } s.SDKClient.SDKClient.HTTPClient.CloseIdleConnections() + _ = s.appTokenSigningKeycache.Close() + _ = s.apiKeyEncryptionKeycache.Close() return err } diff --git a/enterprise/wsproxy/wsproxy_test.go b/enterprise/wsproxy/wsproxy_test.go index 3d3926c5afae7..4add46af9bc0a 100644 --- a/enterprise/wsproxy/wsproxy_test.go +++ b/enterprise/wsproxy/wsproxy_test.go @@ -25,6 +25,9 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + 
"github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/healthcheck/derphealth" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/workspaceapps/apptest" @@ -932,6 +935,9 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { if opts.PrimaryAppHost == "" { opts.PrimaryAppHost = "*.primary.test.coder.com" } + + db, pubsub := dbtestutil.NewDB(t) + client, closer, api, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -947,6 +953,8 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { }, }, WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ @@ -959,6 +967,13 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { _ = closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, + }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, + }) + // Create the external proxy if opts.DisableSubdomainApps { opts.AppHost = "" @@ -1002,6 +1017,8 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { if opts.PrimaryAppHost == "" { opts.PrimaryAppHost = "*.primary.test.coder.com" } + + db, pubsub := dbtestutil.NewDB(t) client, closer, api, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -1017,6 +1034,8 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { }, }, WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ @@ -1029,6 +1048,13 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { _ = closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: 
database.CryptoKeyFeatureWorkspaceAppsToken, + }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, + }) + // Create the external proxy if opts.DisableSubdomainApps { opts.AppHost = "" diff --git a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go index 77d36561c6de8..a8f22c2b93063 100644 --- a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go +++ b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go @@ -205,7 +205,6 @@ type RegisterWorkspaceProxyRequest struct { } type RegisterWorkspaceProxyResponse struct { - AppSecurityKey string `json:"app_security_key"` DERPMeshKey string `json:"derp_mesh_key"` DERPRegionID int32 `json:"derp_region_id"` DERPMap *tailcfg.DERPMap `json:"derp_map"` @@ -372,12 +371,6 @@ func (l *RegisterWorkspaceProxyLoop) Start(ctx context.Context) (RegisterWorkspa } failedAttempts = 0 - // Check for consistency. - if originalRes.AppSecurityKey != resp.AppSecurityKey { - l.failureFn(xerrors.New("app security key has changed, proxy must be restarted")) - return - } - if originalRes.DERPMeshKey != resp.DERPMeshKey { l.failureFn(xerrors.New("DERP mesh key has changed, proxy must be restarted")) return @@ -586,10 +579,10 @@ type CryptoKeysResponse struct { CryptoKeys []codersdk.CryptoKey `json:"crypto_keys"` } -func (c *Client) CryptoKeys(ctx context.Context) (CryptoKeysResponse, error) { +func (c *Client) CryptoKeys(ctx context.Context, feature codersdk.CryptoKeyFeature) (CryptoKeysResponse, error) { res, err := c.Request(ctx, http.MethodGet, - "/api/v2/workspaceproxies/me/crypto-keys", - nil, + "/api/v2/workspaceproxies/me/crypto-keys", nil, + codersdk.WithQueryParam("feature", string(feature)), ) if err != nil { return CryptoKeysResponse{}, xerrors.Errorf("make request: %w", err) diff --git a/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl b/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl new file mode 100644 index 
0000000000000..af6b35171ca30 --- /dev/null +++ b/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl @@ -0,0 +1,15 @@ +#cloud-config +cloud_final_modules: + - [scripts-user, always] +hostname: ${hostname} +users: + - name: ${linux_user} + sudo: ALL=(ALL) NOPASSWD:ALL + shell: /bin/bash + ssh_authorized_keys: + - "${ssh_pubkey}" +# Automatically grow the partition +growpart: + mode: auto + devices: ['/'] + ignore_growroot_disabled: false diff --git a/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl b/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl new file mode 100644 index 0000000000000..67c166cb6c164 --- /dev/null +++ b/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl @@ -0,0 +1,37 @@ +#!/bin/bash +# Install Docker +if ! command -v docker &> /dev/null +then + echo "Docker not found, installing..." + curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh 2>&1 >/dev/null + usermod -aG docker ${linux_user} + newgrp docker +else + echo "Docker is already installed." +fi + +# Set up Docker credentials +mkdir -p "/home/${linux_user}/.docker" + +if [ -n "${docker_config_json_base64}" ]; then + # Write the Docker config JSON to disk if it is provided. + printf "%s" "${docker_config_json_base64}" | base64 -d | tee "/home/${linux_user}/.docker/config.json" +else + # Assume that we're going to use the instance IAM role to pull from the cache repo if we need to. + # Set up the ecr credential helper. 
+ apt-get update -y && apt-get install -y amazon-ecr-credential-helper + mkdir -p .docker + printf '{"credsStore": "ecr-login"}' | tee "/home/${linux_user}/.docker/config.json" +fi +chown -R ${linux_user}:${linux_user} "/home/${linux_user}/.docker" + +# Start envbuilder +sudo -u coder docker run \ + --rm \ + --net=host \ + -h ${hostname} \ + -v /home/${linux_user}/envbuilder:/workspaces \ + %{ for key, value in environment ~} + -e ${key}="${value}" \ + %{ endfor ~} + ${builder_image} diff --git a/examples/templates/aws-devcontainer/main.tf b/examples/templates/aws-devcontainer/main.tf index 27434385c647b..a8f6a2bbd4b46 100644 --- a/examples/templates/aws-devcontainer/main.tf +++ b/examples/templates/aws-devcontainer/main.tf @@ -6,6 +6,9 @@ terraform { aws = { source = "hashicorp/aws" } + cloudinit = { + source = "hashicorp/cloudinit" + } envbuilder = { source = "coder/envbuilder" } @@ -153,13 +156,16 @@ data "aws_iam_instance_profile" "vm_instance_profile" { locals { # TODO: provide a way to pick the availability zone. aws_availability_zone = "${module.aws_region.value}a" - linux_user = "coder" - # Name the container after the workspace and owner. - container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" + + hostname = lower(data.coder_workspace.me.name) + linux_user = "coder" + # The devcontainer builder image is the image that will build the devcontainer. devcontainer_builder_image = data.coder_parameter.devcontainer_builder.value + # We may need to authenticate with a registry. If so, the user will provide a path to a docker config.json. docker_config_json_base64 = try(data.local_sensitive_file.cache_repo_dockerconfigjson[0].content_base64, "") + # The envbuilder provider requires a key-value map of environment variables. Build this here. 
envbuilder_env = { # ENVBUILDER_GIT_URL and ENVBUILDER_CACHE_REPO will be overridden by the provider @@ -172,7 +178,7 @@ locals { # The agent init script is required for the agent to start up. We base64 encode it here # to avoid quoting issues. "ENVBUILDER_INIT_SCRIPT" : "echo ${base64encode(try(coder_agent.dev[0].init_script, ""))} | base64 -d | sh", - "ENVBUILDER_DOCKER_CONFIG_BASE64" : try(data.local_sensitive_file.cache_repo_dockerconfigjson[0].content_base64, ""), + "ENVBUILDER_DOCKER_CONFIG_BASE64" : local.docker_config_json_base64, # The fallback image is the image that will run if the devcontainer fails to build. "ENVBUILDER_FALLBACK_IMAGE" : data.coder_parameter.fallback_image.value, # The following are used to push the image to the cache repo, if defined. @@ -181,87 +187,6 @@ locals { # You can add other required environment variables here. # See: https://github.com/coder/envbuilder/?tab=readme-ov-file#environment-variables } - # If we have a cached image, use the cached image's environment variables. Otherwise, just use - # the environment variables we've defined above. - docker_env_input = try(envbuilder_cached_image.cached.0.env_map, local.envbuilder_env) - # Convert the above to the list of arguments for the Docker run command. - # The startup script will write this to a file, which the Docker run command will reference. - docker_env_list_base64 = base64encode(join("\n", [for k, v in local.docker_env_input : "${k}=${v}"])) - # Builder image will either be the builder image parameter, or the cached image, if cache is provided. - builder_image = try(envbuilder_cached_image.cached[0].image, data.coder_parameter.devcontainer_builder.value) - # User data to start the workspace. 
- user_data = <<-EOT - Content-Type: multipart/mixed; boundary="//" - MIME-Version: 1.0 - - --// - Content-Type: text/cloud-config; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="cloud-config.txt" - - #cloud-config - cloud_final_modules: - - [scripts-user, always] - hostname: ${lower(data.coder_workspace.me.name)} - users: - - name: ${local.linux_user} - sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash - ssh_authorized_keys: - - "${data.coder_parameter.ssh_pubkey.value}" - # Automatically grow the partition - growpart: - mode: auto - devices: ['/'] - ignore_growroot_disabled: false - - --// - Content-Type: text/x-shellscript; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="userdata.txt" - - #!/bin/bash - # Install Docker - if ! command -v docker &> /dev/null - then - echo "Docker not found, installing..." - curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh 2>&1 >/dev/null - usermod -aG docker ${local.linux_user} - newgrp docker - else - echo "Docker is already installed." - fi - - # Set up Docker credentials - mkdir -p "/home/${local.linux_user}/.docker" - if [ -n "${local.docker_config_json_base64}" ]; then - # Write the Docker config JSON to disk if it is provided. - printf "%s" "${local.docker_config_json_base64}" | base64 -d | tee "/home/${local.linux_user}/.docker/config.json" - else - # Assume that we're going to use the instance IAM role to pull from the cache repo if we need to. - # Set up the ecr credential helper. - apt-get update -y && apt-get install -y amazon-ecr-credential-helper - mkdir -p .docker - printf '{"credsStore": "ecr-login"}' | tee "/home/${local.linux_user}/.docker/config.json" - fi - chown -R ${local.linux_user}:${local.linux_user} "/home/${local.linux_user}/.docker" - - # Write the container env to disk. 
- printf "%s" "${local.docker_env_list_base64}" | base64 -d | tee "/home/${local.linux_user}/env.txt" - - # Start envbuilder - sudo -u coder docker run \ - --rm \ - --net=host \ - -h ${lower(data.coder_workspace.me.name)} \ - -v /home/${local.linux_user}/envbuilder:/workspaces \ - -v /var/run/docker.sock:/var/run/docker.sock \ - --env-file /home/${local.linux_user}/env.txt \ - ${local.builder_image} - --//-- - EOT } # Check for the presence of a prebuilt image in the cache repo @@ -274,9 +199,47 @@ resource "envbuilder_cached_image" "cached" { extra_env = local.envbuilder_env } +data "cloudinit_config" "user_data" { + gzip = false + base64_encode = false + + boundary = "//" + + part { + filename = "cloud-config.yaml" + content_type = "text/cloud-config" + + content = templatefile("${path.module}/cloud-init/cloud-config.yaml.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + + ssh_pubkey = data.coder_parameter.ssh_pubkey.value + }) + } + + part { + filename = "userdata.sh" + content_type = "text/x-shellscript" + + content = templatefile("${path.module}/cloud-init/userdata.sh.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + + # If we have a cached image, use the cached image's environment variables. + # Otherwise, just use the environment variables we've defined in locals. + environment = try(envbuilder_cached_image.cached[0].env_map, local.envbuilder_env) + + # Builder image will either be the builder image parameter, or the cached image, if cache is provided. + builder_image = try(envbuilder_cached_image.cached[0].image, data.coder_parameter.devcontainer_builder.value) + + docker_config_json_base64 = local.docker_config_json_base64 + }) + } +} + # This is useful for debugging the startup script. Left here for reference. 
# resource local_file "startup_script" { -# content = local.user_data +# content = data.cloudinit_config.user_data.rendered # filename = "${path.module}/user_data.txt" # } @@ -289,9 +252,9 @@ resource "aws_instance" "vm" { volume_size = data.coder_parameter.root_volume_size_gb.value } - user_data = local.user_data + user_data = data.cloudinit_config.user_data.rendered tags = { - Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" + Name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Required if you are using our example policy, see template README Coder_Provisioned = "true" } diff --git a/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl b/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl new file mode 100644 index 0000000000000..14da769454eda --- /dev/null +++ b/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl @@ -0,0 +1,8 @@ +#cloud-config +cloud_final_modules: + - [scripts-user, always] +hostname: ${hostname} +users: + - name: ${linux_user} + sudo: ALL=(ALL) NOPASSWD:ALL + shell: /bin/bash diff --git a/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl b/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl new file mode 100644 index 0000000000000..2070bc4df3de7 --- /dev/null +++ b/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl @@ -0,0 +1,2 @@ +#!/bin/bash +sudo -u '${linux_user}' sh -c '${init_script}' diff --git a/examples/templates/aws-linux/main.tf b/examples/templates/aws-linux/main.tf index 5f0f87420ccfb..b5979ef89e3e4 100644 --- a/examples/templates/aws-linux/main.tf +++ b/examples/templates/aws-linux/main.tf @@ -140,8 +140,7 @@ provider "aws" { region = data.coder_parameter.region.value } -data "coder_workspace" "me" { -} +data "coder_workspace" "me" {} data "coder_workspace_owner" "me" {} data "aws_ami" "ubuntu" { @@ -214,36 +213,36 @@ resource "coder_app" "code-server" { } locals { + hostname = 
lower(data.coder_workspace.me.name) linux_user = "coder" - user_data = <<-EOT - Content-Type: multipart/mixed; boundary="//" - MIME-Version: 1.0 +} - --// - Content-Type: text/cloud-config; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="cloud-config.txt" +data "cloudinit_config" "user_data" { + gzip = false + base64_encode = false - #cloud-config - cloud_final_modules: - - [scripts-user, always] - hostname: ${lower(data.coder_workspace.me.name)} - users: - - name: ${local.linux_user} - sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash + boundary = "//" - --// - Content-Type: text/x-shellscript; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="userdata.txt" + part { + filename = "cloud-config.yaml" + content_type = "text/cloud-config" - #!/bin/bash - sudo -u ${local.linux_user} sh -c '${try(coder_agent.dev[0].init_script, "")}' - --//-- - EOT + content = templatefile("${path.module}/cloud-init/cloud-config.yaml.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + }) + } + + part { + filename = "userdata.sh" + content_type = "text/x-shellscript" + + content = templatefile("${path.module}/cloud-init/userdata.sh.tftpl", { + linux_user = local.linux_user + + init_script = try(coder_agent.dev[0].init_script, "") + }) + } } resource "aws_instance" "dev" { @@ -251,7 +250,7 @@ resource "aws_instance" "dev" { availability_zone = "${data.coder_parameter.region.value}a" instance_type = data.coder_parameter.instance_type.value - user_data = local.user_data + user_data = data.cloudinit_config.user_data.rendered tags = { Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" # Required if you are using our example policy, see template README diff --git a/flake.lock b/flake.lock index 4c7c29d41aa79..b492e1dc9d04c 100644 --- a/flake.lock +++ b/flake.lock @@ -29,11 +29,11 @@ "systems": "systems" }, 
"locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1726560853, + "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", "type": "github" }, "original": { @@ -44,11 +44,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1720957393, - "narHash": "sha256-oedh2RwpjEa+TNxhg5Je9Ch6d3W1NKi7DbRO1ziHemA=", + "lastModified": 1729880355, + "narHash": "sha256-RP+OQ6koQQLX5nw0NmcDrzvGL8HDLnyXt/jHhL1jwjM=", "owner": "nixos", "repo": "nixpkgs", - "rev": "693bc46d169f5af9c992095736e82c3488bf7dbb", + "rev": "18536bf04cd71abd345f9579158841376fdd0c5a", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index f2d138c2b3aa7..1473db147ce84 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ name = "protoc-gen-go"; owner = "protocolbuffers"; repo = "protobuf-go"; - rev = "v1.30.0"; + rev = "v1.30.0"; src = pkgs.fetchFromGitHub { owner = "protocolbuffers"; repo = "protobuf-go"; @@ -54,11 +54,6 @@ }; subPackages = [ "cmd/protoc-gen-go" ]; vendorHash = null; - proxyVendor = true; - preBuild = '' - export GOPROXY=https://proxy.golang.org,direct - go mod download - ''; }; # The minimal set of packages to build Coder. @@ -138,7 +133,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! 
- vendorHash = "sha256-kPXRp7l05iJd4IdvQeOFOgg2UNzBcloy3tA9Meep9VI="; + vendorHash = "sha256-Tsajkkp+NMjYRCpRX5HlSy/sCSpuABIGDM1jeavVe+w="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; @@ -172,7 +167,7 @@ ''; }; packages = { - proto_gen_go = proto_gen_go_1_30; + proto_gen_go = proto_gen_go_1_30; all = pkgs.buildEnv { name = "all-packages"; paths = devShellPackages; diff --git a/go.mod b/go.mod index 2a3f6477480fe..cf3b533b35674 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coder/coder/v2 -go 1.22.6 +go 1.22.8 // Required until a v3 of chroma is created to lazily initialize all XML files. // None of our dependencies seem to use the registries anyways, so this @@ -98,7 +98,7 @@ require ( github.com/dave/dst v0.27.2 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc github.com/elastic/go-sysinfo v1.14.0 - github.com/fatih/color v1.17.0 + github.com/fatih/color v1.18.0 github.com/fatih/structs v1.1.0 github.com/fatih/structtag v1.2.0 github.com/fergusstrange/embedded-postgres v1.29.0 @@ -114,7 +114,7 @@ require ( github.com/go-ping/ping v1.1.0 github.com/go-playground/validator/v10 v10.22.0 github.com/gofrs/flock v0.12.0 - github.com/gohugoio/hugo v0.134.1 + github.com/gohugoio/hugo v0.136.5 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/golang-migrate/migrate/v4 v4.18.1 github.com/google/go-cmp v0.6.0 @@ -185,7 +185,7 @@ require ( golang.org/x/text v0.19.0 golang.org/x/tools v0.26.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da - google.golang.org/api v0.202.0 + google.golang.org/api v0.203.0 google.golang.org/grpc v1.67.1 google.golang.org/protobuf v1.35.1 gopkg.in/DataDog/dd-trace-go.v1 v1.69.0 @@ -215,7 +215,7 @@ require ( ) require ( - cloud.google.com/go/auth v0.9.8 // indirect + cloud.google.com/go/auth v0.9.9 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect dario.cat/mergo v1.0.0 // indirect github.com/DataDog/go-libddwaf/v3 v3.4.0 // indirect @@ -422,7 
+422,7 @@ require ( github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/goldmark v1.7.4 // indirect - github.com/yuin/goldmark-emoji v1.0.3 // indirect + github.com/yuin/goldmark-emoji v1.0.4 // indirect github.com/zclconf/go-cty v1.15.0 github.com/zeebo/errs v1.3.0 // indirect go.opencensus.io v0.24.0 // indirect diff --git a/go.sum b/go.sum index c0af699d2fb08..771268286eebe 100644 --- a/go.sum +++ b/go.sum @@ -1,8 +1,8 @@ cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6 h1:KHblWIE/KHOwQ6lEbMZt6YpcGve2FEZ1sDtrW1Am5UI= cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6/go.mod h1:NaoTA7KwopCrnaSb0JXTC0PTp/O/Y83Lndnq0OEV3ZQ= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/auth v0.9.8 h1:+CSJ0Gw9iVeSENVCKJoLHhdUykDgXSc4Qn+gu2BRtR8= -cloud.google.com/go/auth v0.9.8/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= +cloud.google.com/go/auth v0.9.9 h1:BmtbpNQozo8ZwW2t7QJjnrQtdganSdmqeIBxHxNkEZQ= +cloud.google.com/go/auth v0.9.9/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY= cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc= cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= @@ -305,12 +305,12 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= -github.com/evanw/esbuild v0.23.1 h1:ociewhY6arjTarKLdrXfDTgy25oxhTZmzP8pfuBTfTA= -github.com/evanw/esbuild v0.23.1/go.mod 
h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= +github.com/evanw/esbuild v0.24.0 h1:GZ78naTLp7FKr+K7eNuM/SLs5maeiHYRPsTg6kmdsSE= +github.com/evanw/esbuild v0.24.0/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= -github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= @@ -415,8 +415,8 @@ github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/Oq github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10= github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc= github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/gobuffalo/flect v1.0.2 h1:eqjPGSo2WmjgY2XlpGwo2NXgL3RucAKo4k4qQMNA5sA= -github.com/gobuffalo/flect v1.0.2/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod 
h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= @@ -442,8 +442,8 @@ github.com/gohugoio/hashstructure v0.1.0 h1:kBSTMLMyTXbrJVAxaKI+wv30MMJJxn9Q8kfQ github.com/gohugoio/hashstructure v0.1.0/go.mod h1:8ohPTAfQLTs2WdzB6k9etmQYclDUeNsIHGPAFejbsEA= github.com/gohugoio/httpcache v0.7.0 h1:ukPnn04Rgvx48JIinZvZetBfHaWE7I01JR2Q2RrQ3Vs= github.com/gohugoio/httpcache v0.7.0/go.mod h1:fMlPrdY/vVJhAriLZnrF5QpN3BNAcoBClgAyQd+lGFI= -github.com/gohugoio/hugo v0.134.1 h1:tLFRqDJuAlifwXispNvIHh6K3CT7ughxbBxzfUTStXY= -github.com/gohugoio/hugo v0.134.1/go.mod h1:/1gnGxlWfAzQarxcQ+tMvKw4e/IMBwy0DFbRxORwOtY= +github.com/gohugoio/hugo v0.136.5 h1:1IEDb0jWamc+LL/2dwDzdsGW67d5BxGcvu3gBkg7KQc= +github.com/gohugoio/hugo v0.136.5/go.mod h1:SarsIX7a9RqYY4VbDqIFrqSt57dIst+B1XKh+Q/lC7w= github.com/gohugoio/hugo-goldmark-extensions/extras v0.2.0 h1:MNdY6hYCTQEekY0oAfsxWZU1CDt6iH+tMLgyMJQh/sg= github.com/gohugoio/hugo-goldmark-extensions/extras v0.2.0/go.mod h1:oBdBVuiZ0fv9xd8xflUgt53QxW5jOCb1S+xntcN4SKo= github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.3.0 h1:7PY5PIJ2mck7v6R52yCFvvYHvsPMEbulgRviw3I9lP4= @@ -533,8 +533,8 @@ github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aN github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= -github.com/hairyhenderson/go-codeowners v0.5.0 h1:dpQB+hVHiRc2VVvc2BHxkuM+tmu9Qej/as3apqUbsWc= -github.com/hairyhenderson/go-codeowners v0.5.0/go.mod h1:R3uW1OQXEj2Gu6/OvZ7bt6hr0qdkLvUWPiqNaWnexpo= +github.com/hairyhenderson/go-codeowners v0.6.0 h1:cRCtmNf9Ni1GIeiAAlHX5IEEB2gr61813Kx5JmXxAAk= +github.com/hairyhenderson/go-codeowners v0.6.0/go.mod h1:RFWbGcjlXhRKNezt7AQHmJucY0alk4osN0+RKOsIAa8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= 
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -850,8 +850,8 @@ github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= @@ -925,8 +925,8 @@ github.com/tdewolff/parse/v2 v2.7.15/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= -github.com/tetratelabs/wazero v1.8.0 h1:iEKu0d4c2Pd+QSRieYbnQC9yiFlMS9D+Jr0LsRmcF4g= -github.com/tetratelabs/wazero v1.8.0/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs= +github.com/tetratelabs/wazero v1.8.1 h1:NrcgVbWfkWvVc4UtT4LRLDf91PsOzDzefMdwhLfA550= +github.com/tetratelabs/wazero v1.8.1/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs= 
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= @@ -994,8 +994,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= github.com/yuin/goldmark v1.7.4 h1:BDXOHExt+A7gwPCJgPIIq7ENvceR7we7rOS9TNoLZeg= github.com/yuin/goldmark v1.7.4/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= -github.com/yuin/goldmark-emoji v1.0.3 h1:aLRkLHOuBR2czCY4R8olwMjID+tENfhyFDMCRhbIQY4= -github.com/yuin/goldmark-emoji v1.0.3/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U= +github.com/yuin/goldmark-emoji v1.0.4 h1:vCwMkPZSNefSUnOW2ZKRUjBSD5Ok3W78IXhGxxAEF90= +github.com/yuin/goldmark-emoji v1.0.4/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.15.0 h1:tTCRWxsexYUmtt/wVxgDClUe+uQusuI443uL6e+5sXQ= github.com/zclconf/go-cty v1.15.0/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= @@ -1067,8 +1067,8 @@ golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI= golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= -golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ= -golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys= +golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s= +golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78= golang.org/x/lint 
v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -1203,8 +1203,8 @@ golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 h1:CawjfCvY golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6/go.mod h1:3rxYc4HtVcSG9gVaTs2GEBdehh+sYPOwKtyUWEOTb80= golang.zx2c4.com/wireguard/windows v0.5.3 h1:On6j2Rpn3OEMXqBq00QEDC7bWSZrPIHKIus8eIuExIE= golang.zx2c4.com/wireguard/windows v0.5.3/go.mod h1:9TEe8TJmtwyQebdFwAkEWOPr3prrtqm+REGFifP60hI= -google.golang.org/api v0.202.0 h1:y1iuVHMqokQbimW79ZqPZWo4CiyFu6HcCYHwSNyzlfo= -google.golang.org/api v0.202.0/go.mod h1:3Jjeq7M/SFblTNCp7ES2xhq+WvGL0KeXI0joHQBfwTQ= +google.golang.org/api v0.203.0 h1:SrEeuwU3S11Wlscsn+LA1kb/Y5xT8uggJSkIhD08NAU= +google.golang.org/api v0.203.0/go.mod h1:BuOVyCSYEPwJb3npWvDnNmFI92f3GeRnHNkETneT3SI= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= diff --git a/install.sh b/install.sh index 8fbc4e58f000c..40753f2f9973c 100755 --- a/install.sh +++ b/install.sh @@ -250,7 +250,7 @@ EOF main() { MAINLINE=1 STABLE=0 - TERRAFORM_VERSION="1.9.2" + TERRAFORM_VERSION="1.9.8" if [ "${TRACE-}" ]; then set -x @@ -363,7 +363,7 @@ main() { if [ "${RSH_ARGS-}" ]; then RSH="${RSH-ssh}" echoh "Installing remotely with $RSH $RSH_ARGS" - curl -fsSL https://coder.dev/install.sh | prefix "$RSH_ARGS" "$RSH" "$RSH_ARGS" sh -s -- "$ALL_FLAGS" + curl -fsSL https://coder.com/install.sh | prefix "$RSH_ARGS" "$RSH" "$RSH_ARGS" sh -s -- "$ALL_FLAGS" return fi diff --git a/provisioner/terraform/install.go b/provisioner/terraform/install.go index 
8c96be6452a22..af425ec307724 100644 --- a/provisioner/terraform/install.go +++ b/provisioner/terraform/install.go @@ -20,7 +20,7 @@ var ( // when Terraform is not available on the system. // NOTE: Keep this in sync with the version in scripts/Dockerfile.base. // NOTE: Keep this in sync with the version in install.sh. - TerraformVersion = version.Must(version.NewVersion("1.9.2")) + TerraformVersion = version.Must(version.NewVersion("1.9.8")) minTerraformVersion = version.Must(version.NewVersion("1.1.0")) maxTerraformVersion = version.Must(version.NewVersion("1.9.9")) // use .9 to automatically allow patch releases diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json index 7f9464857f723..30bc360bb1940 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -260,7 +260,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:23Z", + "timestamp": "2024-10-28T20:07:49Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json index e30cc7513c92b..5ead2c6ace0d5 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "487890be-5e3c-4b06-a95b-a1d0a26f45c3", + "id": "04d66dc4-e25a-4f65-af6f-a9af6b907430", "init_script": "", "login_before_ready": true, 
"metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "d50589ba-d3df-48e7-8fea-1ce92ea1e4e2", + "token": "10fbd765-b0cc-4d6f-b5de-e5a036b2cb4b", "troubleshooting_url": null }, "sensitive_values": { @@ -69,7 +69,7 @@ "outputs": { "script": "" }, - "random": "2660912917742059845" + "random": "7917595776755902204" }, "sensitive_values": { "inputs": {}, @@ -84,7 +84,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7409017517144186812", + "id": "2669991968036854745", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json index 01ebff551b463..38af6827019e7 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -205,7 +205,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:25Z", + "timestamp": "2024-10-28T20:07:50Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json index 109f1a816e7c8..0cee8567db250 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d700ca89-c521-478d-a430-833580e60941", + "id": 
"bcf4bae1-0870-48e9-8bb4-af2f652c4d54", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "1ffba24c-49cd-44ca-9855-08086c8f665f", + "token": "afe98f25-25a2-4892-b921-be04bcd71efc", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8823809151721173831", + "id": "6598177855275264799", "triggers": null }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6260983806355230616", + "id": "4663187895457986148", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json index b57638172a90d..3fe9f6c41fa9b 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -205,7 +205,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:26Z", + "timestamp": "2024-10-28T20:07:52Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json index 4e138f7476405..ffd0690db2263 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - 
"terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d2d1c3a3-3315-47ed-a200-290455966190", + "id": "d047c7b6-b69e-4029-ab82-67468a0364f7", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e2076595-5316-47ec-a305-215f2f2a901c", + "token": "ceff37e3-52b9-4c80-af1b-1f9f99184590", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2887811124246756573", + "id": "3120105803817695206", "triggers": null }, "sensitive_values": {}, @@ -73,7 +73,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6007238228767050576", + "id": "2942451035046396496", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json index 8929284177be8..598d6f1735a84 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -204,7 +204,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:30Z", + "timestamp": "2024-10-28T20:07:55Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json index 4e56df9aa0d7b..7e9bdad7a02bb 100644 --- 
a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "51c9236c-7146-4e6b-85c2-b21361a6a359", + "id": "6ba13739-4a9c-456f-90cf-feba8f194853", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "0779e4d7-d9cf-4fa6-b3f7-92e6b83e52ca", + "token": "6e348a4c-ef00-40ab-9732-817fb828045c", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5801369723993496133", + "id": "3123606937441446452", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json index 0371606e527fc..3331a8f282c2b 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -204,7 +204,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:28Z", + "timestamp": "2024-10-28T20:07:54Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json index 49efca3f597ce..2b04222e751f2 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json +++ 
b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "ba5352ad-c833-442b-93c8-86e330a65192", + "id": "b7e8dd7a-34aa-41e2-977e-e38577ab2476", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "364b1d92-7a4f-475e-956a-90f4b2cfd2eb", + "token": "c6aeeb35-2766-4524-9818-687f7687831d", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3169937457521011358", + "id": "2407243137316459395", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json index b0cacf1cc79f0..5ba9e7b6af80f 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -228,7 +228,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:32Z", + "timestamp": "2024-10-28T20:07:57Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json 
b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json index 5b0424973a840..875d8c9aaf439 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -54,7 +54,7 @@ } ], "env": null, - "id": "186d9525-cebc-476f-888a-4fb43d443938", + "id": "ec5d36c9-8690-4246-8ab3-2d85a3eacee6", "init_script": "", "login_before_ready": true, "metadata": [], @@ -66,7 +66,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "bdb44728-6909-4b52-ba86-ed6c058b5820", + "token": "78c55fa2-8e3c-4564-950d-e022c76cf05a", "troubleshooting_url": null }, "sensitive_values": { @@ -85,7 +85,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "848898101208151671", + "id": "455343782636271645", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/generate.sh b/provisioner/terraform/testdata/generate.sh index 04ac7bdef3c64..6cc79568582ee 100755 --- a/provisioner/terraform/testdata/generate.sh +++ b/provisioner/terraform/testdata/generate.sh @@ -19,6 +19,11 @@ for d in */; do continue fi + if [[ $name == "timings-aggregation" ]]; then + popd + continue + fi + terraform init -upgrade terraform plan -out terraform.tfplan terraform show -json ./terraform.tfplan | jq >"$name".tfplan.json diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json index 6ca82aedf141c..fba34f1cb5f4d 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json +++ 
b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -223,7 +223,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:34Z", + "timestamp": "2024-10-28T20:07:58Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json index 0087c31316519..3cf905c0a2948 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -52,7 +52,7 @@ } ], "env": null, - "id": "30e31610-1801-4837-957e-93bdbbc64ea3", + "id": "ffa1f524-0350-4891-868d-93cad369318a", "init_script": "", "login_before_ready": true, "metadata": [], @@ -64,7 +64,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "825b23c4-4243-4991-ac33-483ee4c50575", + "token": "8ba649af-b498-4f20-8055-b6a0b995837e", "troubleshooting_url": null }, "sensitive_values": { @@ -83,7 +83,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8892771970332750063", + "id": "7420557451345159984", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json index 4c22ab424aeb0..527a2fa05769d 100644 --- 
a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -225,7 +225,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:36Z", + "timestamp": "2024-10-28T20:08:00Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json index 513fe487d181b..929d72365502c 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "da0d9673-d232-47f5-8869-ebd78444dde0", + "id": "0389c8a5-cc5c-485d-959c-8738bada65ff", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "df57eefc-83d5-444e-bbb5-47b5603156fa", + "token": "097b6128-8d60-4849-969b-03f0b463ac2c", "troubleshooting_url": null }, "sensitive_values": { @@ -57,8 +57,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "da0d9673-d232-47f5-8869-ebd78444dde0", - "id": "f4b242e6-f0c9-4cd4-adb0-06062ed8a1b7", + "agent_id": "0389c8a5-cc5c-485d-959c-8738bada65ff", + "id": "0ae6bb98-871c-4091-8098-d32f256d8c05", "instance_id": "example" }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7960015436996479556", + "id": "5569763710827889183", "triggers": null }, "sensitive_values": {}, diff --git 
a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json index 100d89f57a080..2151b4631647a 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -327,7 +327,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:38Z", + "timestamp": "2024-10-28T20:08:02Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json index 079f9c54fd818..9aaa7b352f518 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "fdd8d060-455d-471f-a025-72937e049ccd", + "token": "56420fd5-57e5-44e0-a264-53395b74505a", "troubleshooting_url": null }, "sensitive_values": { @@ -58,13 +58,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "agent_id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "command": null, "display_name": "app1", "external": false, "healthcheck": [], "icon": null, - "id": "65739639-3a6a-43ae-b95b-ba0d5ce07ce8", + "id": "e8163eb0-e56e-46e7-8848-8c6c250ce5b9", 
"name": null, "order": null, "relative_path": null, @@ -89,13 +89,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "agent_id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "command": null, "display_name": "app2", "external": false, "healthcheck": [], "icon": null, - "id": "37f6ea39-3c4a-458d-9f0d-1c036bc5f1d7", + "id": "0971e625-7a23-4108-9765-78f7ad045b38", "name": null, "order": null, "relative_path": null, @@ -119,7 +119,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2485965605399142745", + "id": "60927265551659604", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json index 94cf2e79ec738..d8f5a4763518b 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -575,19 +575,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev1", + "resource": "coder_agent.dev2", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] } ], - "timestamp": "2024-07-15T17:48:43Z", + "timestamp": "2024-10-28T20:08:05Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json index db066d1078bbd..4a94e05baa29d 
100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "9683bf91-8de9-419d-8c60-294a81995ad6", + "token": "c18d762d-062d-43d4-b7c2-98be546b39a6", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "27e6d9dd-6136-42ae-980a-eb299030111e", + "id": "e94994f2-cab5-4288-8ff3-a290c95e4e25", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "102429e0-a63a-4b75-9499-596c90f954ea", + "token": "c0757e3a-4be4-4643-b3ba-b27234169eb1", "troubleshooting_url": null }, "sensitive_values": { @@ -102,13 +102,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "agent_id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "37e01326-a44b-4042-b042-5b3bd26dff1d", + "id": "bf2b3c44-1b1d-49c5-9149-4f2f18590c60", "name": null, "order": null, "relative_path": null, @@ -132,7 +132,7 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "agent_id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "command": null, "display_name": null, 
"external": false, @@ -144,7 +144,7 @@ } ], "icon": null, - "id": "31576d00-cd93-452c-a385-ef91d8ebabc1", + "id": "580cf864-a64d-4430-98b7-fa37c44083f8", "name": null, "order": null, "relative_path": null, @@ -170,13 +170,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "27e6d9dd-6136-42ae-980a-eb299030111e", + "agent_id": "e94994f2-cab5-4288-8ff3-a290c95e4e25", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "c8bb967e-4a36-4ccb-89f6-93cabfba150d", + "id": "182dca7b-12ab-4c58-9424-23b7d61135a9", "name": null, "order": null, "relative_path": null, @@ -200,7 +200,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4919579386937214358", + "id": "3778543820798621894", "triggers": null }, "sensitive_values": {}, @@ -216,7 +216,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4338309449618140876", + "id": "1094622314762410115", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json index c3ecb1db00d44..4cb28ae592516 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -472,19 +472,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev1", + "resource": "coder_agent.dev2", "attribute": [ "id" ] } ], - "timestamp": 
"2024-07-15T17:48:46Z", + "timestamp": "2024-10-28T20:08:06Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json index a982897075c3a..f87b6f0a9eb56 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d5849a8b-3f84-44d1-80df-d61af159490f", + "id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "1c5f00f4-f48b-4f0d-bd9b-5c97a63ea2d9", + "token": "1cfd79e3-3f9c-4d66-b7c2-42c385c26012", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "48ddd7f1-ab68-4247-9b8c-09ae1b93debc", + "id": "ca137ba9-45ce-44ff-8e30-59a86565fa7d", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "ffc286fe-0f27-46fb-bf0f-613f4e2943a4", + "token": "0d3aa4f8-025c-4044-8053-d077484355fb", "troubleshooting_url": null }, "sensitive_values": { @@ -102,8 +102,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d5849a8b-3f84-44d1-80df-d61af159490f", - "id": "88a1c662-5e5b-4da6-bb60-4e4f4311b9ca", + "agent_id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", + "id": "e3d37294-2407-4286-a519-7551b901ba54", "name": "ENV_1", 
"value": "Env 1" }, @@ -120,8 +120,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d5849a8b-3f84-44d1-80df-d61af159490f", - "id": "bbaea14d-a16b-4b1e-9feb-f445a2a08d14", + "agent_id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", + "id": "9451575b-da89-4297-a42d-4aaf0a23775d", "name": "ENV_2", "value": "Env 2" }, @@ -138,8 +138,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "48ddd7f1-ab68-4247-9b8c-09ae1b93debc", - "id": "d6bdb1d7-06cd-4802-a860-b5d7a31f7d7b", + "agent_id": "ca137ba9-45ce-44ff-8e30-59a86565fa7d", + "id": "948e3fb5-12a1-454b-b85e-d4dc1f01838f", "name": "ENV_3", "value": "Env 3" }, @@ -156,7 +156,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "1850797469207235208", + "id": "7502424400840788651", "triggers": null }, "sensitive_values": {}, @@ -172,7 +172,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "214998680720912111", + "id": "3916143681500058654", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json index 83d55b1e95056..ab14e49f02989 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -523,19 +523,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev1", + 
"resource": "coder_agent.dev2", "attribute": [ "id" ] } ], - "timestamp": "2024-07-15T17:48:49Z", + "timestamp": "2024-10-28T20:08:08Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json index 4fa235cb52eb5..37c4ef13ee6fb 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "75b94908-e753-440a-af7d-2a7a97866360", + "token": "77b179b6-0e2d-4307-9ba0-98325fc96e37", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "b8cce9b4-6a56-43e1-a547-5526a05f2881", + "id": "86f7e422-1798-4de5-8209-69b023808241", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "14aa65f3-0e3f-4e86-bb86-5993c06526c1", + "token": "aa4ae02d-4084-4dff-951c-af10f78a98c2", "troubleshooting_url": null }, "sensitive_values": { @@ -102,11 +102,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "agent_id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "cron": null, 
"display_name": "Foobar Script 1", "icon": null, - "id": "13a60062-28d4-459c-8e53-729a45b4a75a", + "id": "eb1eb8f4-3a4a-4040-bd6a-0abce01d6330", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -127,11 +127,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "agent_id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "cron": null, "display_name": "Foobar Script 2", "icon": null, - "id": "c13a1cc1-dfb5-4fab-a8c9-cd65bafef3c0", + "id": "1de43abc-8416-4455-87ca-23fb425b4eeb", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -152,11 +152,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "b8cce9b4-6a56-43e1-a547-5526a05f2881", + "agent_id": "86f7e422-1798-4de5-8209-69b023808241", "cron": null, "display_name": "Foobar Script 3", "icon": null, - "id": "50d359c9-6fdd-4f29-8292-f547b4e22b32", + "id": "ede835f7-4018-464c-807d-7e07af7de9d3", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -177,7 +177,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6599800639836820524", + "id": "4207133259459553257", "triggers": null }, "sensitive_values": {}, @@ -193,7 +193,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7049016876762601534", + "id": "5647997484430231619", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json index ecb4729f909b2..67da167932aa4 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", 
"planned_values": { "root_module": { "resources": [ @@ -464,7 +464,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:40Z", + "timestamp": "2024-10-28T20:08:03Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json index 04bb862e4be54..cd8edc0ae29bc 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "a777f1dc-7e43-497d-bac5-56ad5a2d7f7e", + "id": "c76ed902-d4cb-4905-9961-4d58dda135f9", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "6df4262d-7ce5-41c7-b9ad-84df6d20070e", + "token": "f1aa99ea-570d-49cf-aef9-a4241e3cb023", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "2f29a1dd-04ad-4360-bada-51a73dc1d352", + "id": "1b037439-4eb3-408e-83da-28dc93645944", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": "non-blocking", "startup_script_timeout": 30, - "token": "52549a72-6199-4fab-beb1-27131129f94d", + "token": "20d4e89e-d6de-4eb7-8877-f9186d684aa5", "troubleshooting_url": null }, "sensitive_values": { @@ -116,7 +116,7 @@ } ], "env": null, - "id": "7df8745b-3cd4-4638-a637-f370fc17973d", + "id": "453b5404-8ea4-4197-8664-3638e6a012ca", "init_script": "", "login_before_ready": true, "metadata": [], @@ -128,7 +128,7 @@ "startup_script": null, "startup_script_behavior": "blocking", "startup_script_timeout": 300, - "token": 
"bf843f72-6965-4000-b1ec-02f158556f5e", + "token": "0355cb42-9da0-4bad-b2aa-74db1df76fef", "troubleshooting_url": "https://coder.com/troubleshoot" }, "sensitive_values": { @@ -161,7 +161,7 @@ } ], "env": null, - "id": "6a756f61-0050-4372-b458-35d38b595a79", + "id": "c0a68e9b-5b29-4d95-b664-5ac71dd633cf", "init_script": "", "login_before_ready": false, "metadata": [], @@ -173,7 +173,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "4ed633b5-eff0-48ac-8089-57ffeff02bdc", + "token": "34b78439-5d6e-431b-b06c-339f97a1e9cf", "troubleshooting_url": null }, "sensitive_values": { @@ -192,7 +192,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7329660528883337331", + "id": "5109814714394194897", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json index dd6f3b247d4b9..b156c3b5068b6 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -446,7 +446,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:50Z", + "timestamp": "2024-10-28T20:08:10Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json index b172a050bebe3..d3fc254bf40b0 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { 
"root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "eb7478f3-26ff-4c6d-b307-7c5cb78c692d", + "token": "30533677-f04a-493b-b6cb-314d9abf7769", "troubleshooting_url": null }, "sensitive_values": { @@ -57,13 +57,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "ae194f56-c14c-4d04-a05b-7cd9c4a95dbe", + "id": "537e9069-492b-4721-96dd-cffba275ecd9", "name": null, "order": null, "relative_path": null, @@ -87,7 +87,7 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, @@ -99,7 +99,7 @@ } ], "icon": null, - "id": "8254828f-8582-497a-8f9d-c2bc2b3495cc", + "id": "3a4c78a0-7ea3-44aa-9ea8-4e08e387b4b6", "name": null, "order": null, "relative_path": null, @@ -125,13 +125,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "ec4dea85-191b-4543-b19c-90f298c514fb", + "id": "23555681-0ecb-4962-8e85-367d3a9d0228", "name": null, "order": null, "relative_path": null, @@ -155,7 +155,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7610101534452317567", + 
"id": "2905101599123333983", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json index e2ccff05866b0..3b7881701038c 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -432,7 +432,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:54Z", + "timestamp": "2024-10-28T20:08:13Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json index 569f348ec6c3a..170630d0e3103 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "8a6eab74-3f83-4551-ab7c-6e2fbae32099", + "id": "0cbc2449-fbaa-447a-8487-6c47367af0be", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "c90854c9-a5a6-4794-9470-ef05bbc51491", + "token": "b03606cc-1ed3-4187-964d-389cf2ef223f", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": 
"/icon/server.svg", - "id": "77c46f95-fee8-4587-b6db-5da8d7d562a8", + "id": "d6c33b98-addd-4d97-8659-405350bc06c1", "item": [ { "is_null": false, @@ -86,7 +86,7 @@ "value": "" } ], - "resource_id": "5995054412151645025" + "resource_id": "5673227143105805783" }, "sensitive_values": { "item": [ @@ -110,7 +110,7 @@ "daily_cost": 20, "hide": true, "icon": "/icon/server.svg", - "id": "20faad5d-8891-4ec8-8a94-46967240127f", + "id": "76594f08-2261-4114-a61f-e07107a86f89", "item": [ { "is_null": false, @@ -119,7 +119,7 @@ "value": "world" } ], - "resource_id": "5995054412151645025" + "resource_id": "5673227143105805783" }, "sensitive_values": { "item": [ @@ -139,7 +139,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5995054412151645025", + "id": "5673227143105805783", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json index 09639c0768fe1..f9c24830c6ef3 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -384,7 +384,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:52Z", + "timestamp": "2024-10-28T20:08:11Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json index 3efef1ac379e8..a41aff216b11c 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json @@ -1,6 +1,6 @@ { "format_version": 
"1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "cbffc18b-d2e5-4826-b202-5b7158917307", + "id": "3bcbc547-b434-4dbd-b5ed-551edfba1b5c", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "3ccecdc6-6947-44f8-bede-f3c8ee8f7afe", + "token": "2d25fcc3-a355-4e92-98c6-ab780894ffee", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "bee16745-291f-4209-937f-e8198beefbb2", + "id": "d9ce721c-dff3-44fd-92d1-155f37c84a56", "item": [ { "is_null": false, @@ -98,7 +98,7 @@ "value": "squirrel" } ], - "resource_id": "23022633153502273" + "resource_id": "4099397325680267994" }, "sensitive_values": { "item": [ @@ -121,7 +121,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "23022633153502273", + "id": "4099397325680267994", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json index 5a9754c6eb8ef..72120dfaabeec 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": 
"2505d55b-a9f4-4aaa-90fd-b4f36079e2fd", + "id": "e8805d7c-1636-4416-9520-b83234d68ddc", "mutable": false, "name": "Example", "option": null, @@ -163,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "ad73ddbc-2c11-45a1-913c-b73cdd3b9b0f", + "id": "df43829a-49ce-4911-97ef-2fca78456c9f", "mutable": false, "name": "Sample", "option": null, @@ -269,7 +269,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:58Z", + "timestamp": "2024-10-28T20:08:17Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json index b3fed19aaa61c..1d675d685a37c 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "84da03d3-81af-43bd-bdc0-6fc2f34e3f4b", + "id": "81ada233-3a30-49d3-a56f-aca92f19c411", "mutable": false, "name": "Example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "eeb97e5f-1186-422f-b6db-95b3d4257636", + "id": "4dc1049f-0d54-408a-a412-95629ae5cd84", "mutable": false, "name": "Sample", "option": null, @@ -80,7 +80,7 @@ } ], "env": null, - "id": "ba82266f-8b63-4a31-9158-94b5ca51ceeb", + "id": "86cc4d6e-23b3-4632-9bc9-d3a321e8b906", "init_script": "", "login_before_ready": true, "metadata": [], @@ -92,7 +92,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e8177f3a-5ce1-41ea-b709-cc8c3624c298", + "token": "0c3e7639-bafc-4e62-8e38-cb4e1b44e3f3", "troubleshooting_url": null }, "sensitive_values": { @@ 
-111,7 +111,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8146132740199712825", + "id": "2501594036325466407", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json index fb308658d78f1..66153605ee4a0 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "0c018669-159f-4444-a3ca-3f80c9bb3ce3", + "id": "df8ad066-047d-434d-baa3-e19517ee7395", "mutable": true, "name": "number_example", "option": null, @@ -163,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "78ced97f-753b-45e1-b176-5f7f37956363", + "id": "7d9658aa-ff69-477a-9063-e9fd49fd9a9b", "mutable": false, "name": "number_example_max", "option": null, @@ -202,7 +202,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "df27d2cd-6feb-4106-bc0d-dacb33da8547", + "id": "bd6fcaac-db7f-4c4d-a664-fe7f47fad28a", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -241,7 +241,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "35584863-347b-4dc0-8618-b2f7f0e42bbf", + "id": "8d42942d-5a10-43c9-a31d-d3fe9a7814e8", "mutable": false, "name": "number_example_min", "option": null, @@ -280,7 +280,7 @@ "display_name": null, "ephemeral": 
false, "icon": null, - "id": "cafe4351-a64b-481d-9a0d-e2c9cf057b25", + "id": "695301d0-8325-4685-824d-1ca9591689e3", "mutable": false, "name": "number_example_min_max", "option": null, @@ -319,7 +319,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "41659f9c-8934-4763-8285-9ec401f5ef6b", + "id": "cd921934-d1b1-4370-8a73-2d43658ea877", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -551,7 +551,7 @@ ] } }, - "timestamp": "2024-07-15T17:49:00Z", + "timestamp": "2024-10-28T20:08:18Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json index 3e18e55b2a735..35b981c3a9b54 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "d82331f3-56ce-43f5-a6f6-d818c916ac7a", + "id": "e09e9110-2f11-4a45-bc9f-dc7a12834ef0", "mutable": true, "name": "number_example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "6ee08f4e-4200-4c4c-b606-7e7d4a6a5fdb", + "id": "7ba6324d-d8fd-43b8-91d2-d970a424db8b", "mutable": false, "name": "number_example_max", "option": null, @@ -83,7 +83,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "f879ade0-27ba-45c8-84dd-d2393a7cdad0", + "id": "64e12007-8479-43bf-956b-86fe7ae73066", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -122,7 +122,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": 
"8d057664-79e1-4f0e-a24e-72b2ac5e3306", + "id": "32681b2b-682f-4a5f-9aa6-c05be9d41a89", "mutable": false, "name": "number_example_min", "option": null, @@ -161,7 +161,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0249146a-ba5e-4d59-bbd2-48d1027ebb42", + "id": "03b67b89-0d35-449d-8997-f5ce4b7c1518", "mutable": false, "name": "number_example_min_max", "option": null, @@ -200,7 +200,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "edeb33bb-b8d4-4770-9c41-e0e94a4886af", + "id": "2201fc53-38c6-4a68-b3b9-4f6ef3390962", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -248,7 +248,7 @@ } ], "env": null, - "id": "7c672b0d-41f4-45ae-9596-9be1455505a9", + "id": "060ffd05-39a9-4fa3-81a3-7d9d8e655bf8", "init_script": "", "login_before_ready": true, "metadata": [], @@ -260,7 +260,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "4938f98a-bc70-4dae-8825-27d41ba34842", + "token": "58ed35b2-6124-4183-a493-40cb0174f4d2", "troubleshooting_url": null }, "sensitive_values": { @@ -279,7 +279,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8043802126847197223", + "id": "4610812354433374355", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json index a37148f2b4d24..1ec2927a40ad1 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { 
"resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "b0837593-03d9-4039-87d3-9170a6513751", + "id": "cbec5bff-b81a-4815-99c0-40c0629779fb", "mutable": false, "name": "Example", "option": [ @@ -180,7 +180,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "aff9e428-f431-4ca1-8c2f-3c1adf662ed7", + "id": "dd1c36b7-a961-4eb2-9687-c32b5ee54fbc", "mutable": false, "name": "number_example", "option": null, @@ -207,7 +207,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "15371ea5-9ffc-4672-8c7b-338eed974655", + "id": "f1bcac54-a58c-44b2-94f5-243a0b1492d3", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -246,7 +246,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2e77000c-d96f-4110-ad55-3a733fef768c", + "id": "79c76ac1-8e71-4872-9107-d7a9529f7dce", "mutable": false, "name": "number_example_min_max", "option": null, @@ -285,7 +285,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2c9f5877-7df8-42a8-9d34-20d7a74832e0", + "id": "da7a8aff-ffe3-402f-bf7e-b369ae04b041", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -324,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "d9eb4625-889c-4eb7-87d4-80644c5ee57a", + "id": "5fe2dad0-e11f-46f0-80ae-c0c3a29cd1fd", "mutable": false, "name": "Sample", "option": null, @@ -355,7 +355,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "7549ee27-b944-46e8-89c7-66ce22285efc", + "id": "920f98a1-3a6f-4602-8c87-ebbbef0310c5", "mutable": true, "name": "First parameter from module", "option": null, @@ -382,7 +382,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "c5fd9f8a-f83f-450a-b93a-4f4267be580a", + "id": "f438d9ad-6c3e-44f3-95cd-1d423a9b09e5", "mutable": true, "name": "Second parameter from module", "option": null, @@ -414,7 +414,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": 
"1b819f45-1451-45d8-bdf6-80c067be383b", + "id": "b2c53701-be53-4591-aacf-1c83f75bcf15", "mutable": true, "name": "First parameter from child module", "option": null, @@ -441,7 +441,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "103f609f-e7d4-4060-b9dc-cc59afbcc2ad", + "id": "038b18d4-d430-4703-886a-b7e10e01f856", "mutable": true, "name": "Second parameter from child module", "option": null, @@ -794,7 +794,7 @@ } } }, - "timestamp": "2024-07-15T17:48:56Z", + "timestamp": "2024-10-28T20:08:15Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json index f3011a94e387c..1bfc1835dfcaf 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "6de72459-12d0-493b-a6de-849e08a80231", + "id": "8586d419-7e61-4e67-b8df-d98d8ac7ffd3", "mutable": false, "name": "Example", "option": [ @@ -61,7 +61,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "4c531563-c935-41ad-8cca-f417c16e5278", + "id": "0cc54450-13a6-486c-b542-6e23a9f3596b", "mutable": false, "name": "number_example", "option": null, @@ -88,7 +88,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0c77e023-ebfd-4868-a25b-2f6b131c52a3", + "id": "0c0b913a-0bde-4b9e-8a70-06d9b6d38a26", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -127,7 +127,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "d5415c63-b007-4409-8715-8750fcd014c5", + "id": "37fd5372-2741-49dd-bf01-6ba29a24c9dd", "mutable": false, "name": 
"number_example_min_max", "option": null, @@ -166,7 +166,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "27846e1e-1ea4-463d-a0f1-2f06bd2767ff", + "id": "c0fd84ff-117f-442a-95f7-e8368ba7ce1d", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -205,7 +205,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "e0d43ce9-3377-48ab-8917-960a39fc78aa", + "id": "ab067ffc-99de-4705-97fe-16c713d2d115", "mutable": false, "name": "Sample", "option": null, @@ -241,7 +241,7 @@ } ], "env": null, - "id": "a84d968c-98b8-49e4-878f-8afbfcfcd058", + "id": "7daab302-d00e-48d4-878c-47afbe3a13bc", "init_script": "", "login_before_ready": true, "metadata": [], @@ -253,7 +253,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "494f0e2b-0727-4833-b824-f3c5ae5ec701", + "token": "e98c452d-cbe9-4ae1-8382-a986089dccb4", "troubleshooting_url": null }, "sensitive_values": { @@ -272,7 +272,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6676147453513335498", + "id": "2355126481625628137", "triggers": null }, "sensitive_values": {}, @@ -297,7 +297,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "28bbdb1b-bbfd-448e-a90d-667372384184", + "id": "0978cc7c-f787-406c-a050-9272bbb52085", "mutable": true, "name": "First parameter from module", "option": null, @@ -324,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "edaafb64-16d1-4abc-9016-aa30d7ee3ed1", + "id": "cd01d7da-9f56-460d-b163-e88a0a9a5f67", "mutable": true, "name": "Second parameter from module", "option": null, @@ -356,7 +356,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "85b4aa9c-206a-4708-b12b-f80e8905d178", + "id": "528e845a-843b-48b3-a421-a22340726d5a", "mutable": true, "name": "First parameter from child module", "option": null, @@ -383,7 +383,7 @@ "display_name": null, "ephemeral": false, "icon": null, - 
"id": "913d7ffb-d406-4a2e-9368-106e0af12d34", + "id": "f486efbb-2fc6-4091-9eca-0088ac6cd3cc", "mutable": true, "name": "Second parameter from child module", "option": null, diff --git a/provisioner/terraform/testdata/version.txt b/provisioner/terraform/testdata/version.txt index 8fdcf3869464a..66beabb5795e7 100644 --- a/provisioner/terraform/testdata/version.txt +++ b/provisioner/terraform/testdata/version.txt @@ -1 +1 @@ -1.9.2 +1.9.8 diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 1ad37edf20360..33c9f551346a6 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -26,7 +26,7 @@ RUN apk add --no-cache \ # Terraform was disabled in the edge repo due to a build issue. # https://gitlab.alpinelinux.org/alpine/aports/-/commit/f3e263d94cfac02d594bef83790c280e045eba35 # Using wget for now. Note that busybox unzip doesn't support streaming. -RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.2/terraform_1.9.2_linux_${ARCH}.zip" && \ +RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.8/terraform_1.9.8_linux_${ARCH}.zip" && \ busybox unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/scripts/dbgen/main.go b/scripts/dbgen/main.go index 54b104d04f718..4ec08920e9741 100644 --- a/scripts/dbgen/main.go +++ b/scripts/dbgen/main.go @@ -60,7 +60,7 @@ func run() error { return xerrors.Errorf("stub dbmem: %w", err) } - err = orderAndStubDatabaseFunctions(filepath.Join(databasePath, "dbmetrics", "dbmetrics.go"), "m", "metricsStore", func(params stubParams) string { + err = orderAndStubDatabaseFunctions(filepath.Join(databasePath, "dbmetrics", "querymetrics.go"), "m", 
"queryMetricsStore", func(params stubParams) string { return fmt.Sprintf(` start := time.Now() %s := m.s.%s(%s) diff --git a/scripts/develop.sh b/scripts/develop.sh index bdaf81c7536e5..7dfad72d2e9f6 100755 --- a/scripts/develop.sh +++ b/scripts/develop.sh @@ -164,7 +164,7 @@ fatal() { if [ ! -f "${PROJECT_ROOT}/.coderv2/developsh-did-first-setup" ]; then # Try to create the initial admin user. - if "${CODER_DEV_SHIM}" login http://127.0.0.1:3000 --first-user-username=admin --first-user-email=admin@coder.com --first-user-password="${password}" --first-user-full-name="Admin User" --first-user-trial=true; then + if "${CODER_DEV_SHIM}" login http://127.0.0.1:3000 --first-user-username=admin --first-user-email=admin@coder.com --first-user-password="${password}" --first-user-full-name="Admin User" --first-user-trial=false; then # Only create this file if an admin user was successfully # created, otherwise we won't retry on a later attempt. touch "${PROJECT_ROOT}/.coderv2/developsh-did-first-setup" diff --git a/site/e2e/global.setup.ts b/site/e2e/global.setup.ts index 6eafd2886de37..f39a2d475804e 100644 --- a/site/e2e/global.setup.ts +++ b/site/e2e/global.setup.ts @@ -35,6 +35,7 @@ test("setup deployment", async ({ page }) => { expect(constants.license.split(".").length).toBe(3); // otherwise it's invalid await page.goto("/deployment/licenses", { waitUntil: "domcontentloaded" }); + await expect(page).toHaveTitle("License Settings - Coder"); await page.getByText("Add a license").click(); await page.getByRole("textbox").fill(constants.license); diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index fd436fa5dad7f..c5ac7f1abde65 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -425,7 +425,9 @@ export const startAgentWithCommand = async ( ); }); - await page.getByTestId("agent-status-ready").waitFor({ state: "visible" }); + await page + .getByTestId("agent-status-ready") + .waitFor({ state: "visible", timeout: 45_000 }); return cp; }; @@ -928,7 +930,7 @@ 
export async function openTerminalWindow( ): Promise { // Wait for the web terminal to open in a new tab const pagePromise = context.waitForEvent("page"); - await page.getByTestId("terminal").click(); + await page.getByTestId("terminal").click({ timeout: 60_000 }); const terminal = await pagePromise; await terminal.waitForLoadState("domcontentloaded"); diff --git a/site/e2e/playwright.config.ts b/site/e2e/playwright.config.ts index 7042ebfcf5bb6..ea55bf398e7df 100644 --- a/site/e2e/playwright.config.ts +++ b/site/e2e/playwright.config.ts @@ -65,16 +65,12 @@ export default defineConfig({ testMatch: /.*\.spec\.ts/, dependencies: ["testsSetup"], use: { storageState }, - timeout: 50_000, + timeout: 30_000, }, ], reporter: [["./reporter.ts"]], use: { - // It'd be very nice to add this, but there are some tests that need - // tweaking to make it work consistently (notably, ones that wait for agent - // stats on the workspace page. The default is like 50 seconds, which is - // way too long and makes it painful to wait for test runs in CI. 
- // actionTimeout: 5000, // 5 seconds + actionTimeout: 5000, baseURL: `http://localhost:${coderPort}`, video: "retain-on-failure", ...(wsEndpoint diff --git a/site/e2e/tests/app.spec.ts b/site/e2e/tests/app.spec.ts index bf127ce9f21b7..9682fcb5751dc 100644 --- a/site/e2e/tests/app.spec.ts +++ b/site/e2e/tests/app.spec.ts @@ -13,6 +13,8 @@ import { beforeCoderTest } from "../hooks"; test.beforeEach(({ page }) => beforeCoderTest(page)); test("app", async ({ context, page }) => { + test.setTimeout(75_000); + const appContent = "Hello World"; const token = randomUUID(); const srv = http @@ -56,7 +58,7 @@ test("app", async ({ context, page }) => { // Wait for the web terminal to open in a new tab const pagePromise = context.waitForEvent("page"); - await page.getByText(appName).click(); + await page.getByText(appName).click({ timeout: 60_000 }); const app = await pagePromise; await app.waitForLoadState("domcontentloaded"); await app.getByText(appContent).isVisible(); diff --git a/site/e2e/tests/outdatedAgent.spec.ts b/site/e2e/tests/outdatedAgent.spec.ts index a4e42e62ec725..422074d92e341 100644 --- a/site/e2e/tests/outdatedAgent.spec.ts +++ b/site/e2e/tests/outdatedAgent.spec.ts @@ -17,7 +17,7 @@ const agentVersion = "v2.12.1"; test.beforeEach(({ page }) => beforeCoderTest(page)); test(`ssh with agent ${agentVersion}`, async ({ page }) => { - test.setTimeout(40_000); // This is a slow test, 20s may not be enough on Mac. 
+ test.setTimeout(60_000); const token = randomUUID(); const template = await createTemplate(page, { diff --git a/site/e2e/tests/outdatedCLI.spec.ts b/site/e2e/tests/outdatedCLI.spec.ts index 22301483e0977..3470367c63546 100644 --- a/site/e2e/tests/outdatedCLI.spec.ts +++ b/site/e2e/tests/outdatedCLI.spec.ts @@ -17,6 +17,8 @@ const clientVersion = "v2.8.0"; test.beforeEach(({ page }) => beforeCoderTest(page)); test(`ssh with client ${clientVersion}`, async ({ page }) => { + test.setTimeout(60_000); + const token = randomUUID(); const template = await createTemplate(page, { apply: [ diff --git a/site/e2e/tests/webTerminal.spec.ts b/site/e2e/tests/webTerminal.spec.ts index 6db4363a4e360..fc6baec7daa67 100644 --- a/site/e2e/tests/webTerminal.spec.ts +++ b/site/e2e/tests/webTerminal.spec.ts @@ -12,6 +12,8 @@ import { beforeCoderTest } from "../hooks"; test.beforeEach(({ page }) => beforeCoderTest(page)); test("web terminal", async ({ context, page }) => { + test.setTimeout(75_000); + const token = randomUUID(); const template = await createTemplate(page, { apply: [ diff --git a/site/jest.setup.ts b/site/jest.setup.ts index 7a06ebba2592f..7d4b6f0772bc4 100644 --- a/site/jest.setup.ts +++ b/site/jest.setup.ts @@ -1,7 +1,7 @@ import "@testing-library/jest-dom"; import "jest-location-mock"; import { cleanup } from "@testing-library/react"; -import crypto from "crypto"; +import crypto from "node:crypto"; import { useMemo } from "react"; import type { Region } from "api/typesGenerated"; import type { ProxyLatencyReport } from "contexts/useProxyLatency"; @@ -48,9 +48,7 @@ global.ResizeObserver = require("resize-observer-polyfill"); // Polyfill the getRandomValues that is used on utils/random.ts Object.defineProperty(global.self, "crypto", { value: { - getRandomValues: function (buffer: Buffer) { - return crypto.randomFillSync(buffer); - }, + getRandomValues: crypto.randomFillSync, }, }); @@ -72,5 +70,5 @@ afterEach(() => { // Clean up after the tests are finished. 
afterAll(() => server.close()); -// This is needed because we are compiling under `--isolatedModules` +// biome-ignore lint/complexity/noUselessEmptyExport: This is needed because we are compiling under `--isolatedModules` export {}; diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index e55167ef03f88..d687fb68ec61f 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -2110,8 +2110,8 @@ export type BuildReason = "autostart" | "autostop" | "initiator" export const BuildReasons: BuildReason[] = ["autostart", "autostop", "initiator"] // From codersdk/deployment.go -export type CryptoKeyFeature = "oidc_convert" | "tailnet_resume" | "workspace_apps" -export const CryptoKeyFeatures: CryptoKeyFeature[] = ["oidc_convert", "tailnet_resume", "workspace_apps"] +export type CryptoKeyFeature = "oidc_convert" | "tailnet_resume" | "workspace_apps_api_key" | "workspace_apps_token" +export const CryptoKeyFeatures: CryptoKeyFeature[] = ["oidc_convert", "tailnet_resume", "workspace_apps_api_key", "workspace_apps_token"] // From codersdk/workspaceagents.go export type DisplayApp = "port_forwarding_helper" | "ssh_helper" | "vscode" | "vscode_insiders" | "web_terminal" diff --git a/site/src/contexts/auth/permissions.tsx b/site/src/contexts/auth/permissions.tsx index 0c89d81686d2f..d2de7864874f0 100644 --- a/site/src/contexts/auth/permissions.tsx +++ b/site/src/contexts/auth/permissions.tsx @@ -1,3 +1,5 @@ +import type { AuthorizationCheck } from "api/typesGenerated"; + export const checks = { viewAllUsers: "viewAllUsers", updateUsers: "updateUsers", @@ -11,13 +13,20 @@ export const checks = { viewUpdateCheck: "viewUpdateCheck", viewExternalAuthConfig: "viewExternalAuthConfig", viewDeploymentStats: "viewDeploymentStats", + readWorkspaceProxies: "readWorkspaceProxies", editWorkspaceProxies: "editWorkspaceProxies", createOrganization: "createOrganization", editAnyOrganization: "editAnyOrganization", viewAnyGroup: "viewAnyGroup", 
createGroup: "createGroup", viewAllLicenses: "viewAllLicenses", -} as const; + viewNotificationTemplate: "viewNotificationTemplate", +} as const satisfies Record; + +// Type expression seems a little redundant (`keyof typeof checks` has the same +// result), just because each key-value pair is currently symmetrical; this may +// change down the line +type PermissionValue = (typeof checks)[keyof typeof checks]; export const permissionsToCheck = { [checks.viewAllUsers]: { @@ -94,6 +103,12 @@ export const permissionsToCheck = { }, action: "read", }, + [checks.readWorkspaceProxies]: { + object: { + resource_type: "workspace_proxy", + }, + action: "read", + }, [checks.editWorkspaceProxies]: { object: { resource_type: "workspace_proxy", @@ -116,7 +131,6 @@ export const permissionsToCheck = { [checks.viewAnyGroup]: { object: { resource_type: "group", - org_id: "any", }, action: "read", }, @@ -132,6 +146,12 @@ export const permissionsToCheck = { }, action: "read", }, -} as const; + [checks.viewNotificationTemplate]: { + object: { + resource_type: "notification_template", + }, + action: "read", + }, +} as const satisfies Record; -export type Permissions = Record; +export type Permissions = Record; diff --git a/site/src/modules/management/DeploymentSettingsProvider.tsx b/site/src/modules/management/DeploymentSettingsProvider.tsx new file mode 100644 index 0000000000000..c9f6cd5f4a8ce --- /dev/null +++ b/site/src/modules/management/DeploymentSettingsProvider.tsx @@ -0,0 +1,64 @@ +import type { DeploymentConfig } from "api/api"; +import { deploymentConfig } from "api/queries/deployment"; +import { ErrorAlert } from "components/Alert/ErrorAlert"; +import { Loader } from "components/Loader/Loader"; +import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { RequirePermission } from "contexts/auth/RequirePermission"; +import { type FC, createContext, useContext } from "react"; +import { useQuery } from "react-query"; +import { Outlet } from "react-router-dom"; + 
+export const DeploymentSettingsContext = createContext< + DeploymentSettingsValue | undefined +>(undefined); + +type DeploymentSettingsValue = Readonly<{ + deploymentConfig: DeploymentConfig; +}>; + +export const useDeploymentSettings = (): DeploymentSettingsValue => { + const context = useContext(DeploymentSettingsContext); + if (!context) { + throw new Error( + `${useDeploymentSettings.name} should be used inside of ${DeploymentSettingsProvider.name}`, + ); + } + + return context; +}; + +const DeploymentSettingsProvider: FC = () => { + const { permissions } = useAuthenticated(); + const deploymentConfigQuery = useQuery(deploymentConfig()); + + // The deployment settings page also contains users, audit logs, groups and + // organizations, so this page must be visible if you can see any of these. + const canViewDeploymentSettingsPage = + permissions.viewDeploymentValues || + permissions.viewAllUsers || + permissions.editAnyOrganization || + permissions.viewAnyAuditLog; + + // Not a huge problem to unload the content in the event of an error, + // because the sidebar rendering isn't tied to this. 
Even if the user hits + // a 403 error, they'll still have navigation options + if (deploymentConfigQuery.error) { + return ; + } + + if (!deploymentConfigQuery.data) { + return ; + } + + return ( + + + + + + ); +}; + +export default DeploymentSettingsProvider; diff --git a/site/src/modules/management/ManagementSettingsLayout.tsx b/site/src/modules/management/ManagementSettingsLayout.tsx index b9fcbc0936e4b..0cb313f0e53b9 100644 --- a/site/src/modules/management/ManagementSettingsLayout.tsx +++ b/site/src/modules/management/ManagementSettingsLayout.tsx @@ -1,7 +1,4 @@ -import type { DeploymentConfig } from "api/api"; -import { deploymentConfig } from "api/queries/deployment"; import type { AuthorizationResponse, Organization } from "api/typesGenerated"; -import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { Stack } from "components/Stack/Stack"; @@ -9,7 +6,6 @@ import { useAuthenticated } from "contexts/auth/RequireAuth"; import { RequirePermission } from "contexts/auth/RequirePermission"; import { useDashboard } from "modules/dashboard/useDashboard"; import { type FC, Suspense, createContext, useContext } from "react"; -import { useQuery } from "react-query"; import { Outlet, useParams } from "react-router-dom"; import { Sidebar } from "./Sidebar"; @@ -18,7 +14,6 @@ export const ManagementSettingsContext = createContext< >(undefined); type ManagementSettingsValue = Readonly<{ - deploymentValues: DeploymentConfig; organizations: readonly Organization[]; organization?: Organization; }>; @@ -48,15 +43,8 @@ export const canEditOrganization = ( ); }; -/** - * A multi-org capable settings page layout. - * - * If multi-org is not enabled or licensed, this is the wrong layout to use. - * See DeploySettingsLayoutInner instead. 
- */ -export const ManagementSettingsLayout: FC = () => { +const ManagementSettingsLayout: FC = () => { const { permissions } = useAuthenticated(); - const deploymentConfigQuery = useQuery(deploymentConfig()); const { organizations } = useDashboard(); const { organization: orgName } = useParams() as { organization?: string; @@ -70,14 +58,6 @@ export const ManagementSettingsLayout: FC = () => { permissions.editAnyOrganization || permissions.viewAnyAuditLog; - if (deploymentConfigQuery.error) { - return ; - } - - if (!deploymentConfigQuery.data) { - return ; - } - const organization = organizations && orgName ? organizations.find((org) => org.name === orgName) @@ -87,7 +67,6 @@ export const ManagementSettingsLayout: FC = () => { { -
+
}> @@ -106,3 +85,5 @@ export const ManagementSettingsLayout: FC = () => { ); }; + +export default ManagementSettingsLayout; diff --git a/site/src/modules/management/SidebarView.stories.tsx b/site/src/modules/management/SidebarView.stories.tsx index 2ddcf7750bc8d..6ffe4480261c9 100644 --- a/site/src/modules/management/SidebarView.stories.tsx +++ b/site/src/modules/management/SidebarView.stories.tsx @@ -1,5 +1,6 @@ import type { Meta, StoryObj } from "@storybook/react"; import { + MockNoPermissions, MockOrganization, MockOrganization2, MockPermissions, @@ -96,7 +97,7 @@ export const NoDeploymentValues: Story = { export const NoPermissions: Story = { args: { - permissions: {}, + permissions: MockNoPermissions, }, }; diff --git a/site/src/modules/management/SidebarView.tsx b/site/src/modules/management/SidebarView.tsx index b4099a4dd7815..e6c99769e529f 100644 --- a/site/src/modules/management/SidebarView.tsx +++ b/site/src/modules/management/SidebarView.tsx @@ -2,19 +2,15 @@ import { cx } from "@emotion/css"; import type { Interpolation, Theme } from "@emotion/react"; import AddIcon from "@mui/icons-material/Add"; import SettingsIcon from "@mui/icons-material/Settings"; -import type { - AuthorizationResponse, - Experiments, - Organization, -} from "api/typesGenerated"; +import type { AuthorizationResponse, Organization } from "api/typesGenerated"; import { FeatureStageBadge } from "components/FeatureStageBadge/FeatureStageBadge"; import { Loader } from "components/Loader/Loader"; import { Sidebar as BaseSidebar } from "components/Sidebar/Sidebar"; import { Stack } from "components/Stack/Stack"; import { UserAvatar } from "components/UserAvatar/UserAvatar"; +import type { Permissions } from "contexts/auth/permissions"; import { type ClassName, useClassName } from "hooks/useClassName"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { linkToUsers } from "modules/navigation"; import type { FC, ReactNode } from "react"; import { Link, NavLink } from 
"react-router-dom"; @@ -30,7 +26,7 @@ interface SidebarProps { /** Organizations and their permissions or undefined if still fetching. */ organizations: OrganizationWithPermissions[] | undefined; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -72,7 +68,7 @@ interface DeploymentSettingsNavigationProps { /** Whether a deployment setting page is being viewed. */ active: boolean; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -130,10 +126,11 @@ const DeploymentSettingsNavigation: FC = ({ {permissions.viewDeploymentValues && ( Network )} - {/* All users can view workspace regions. */} - - Workspace Proxies - + {permissions.readWorkspaceProxies && ( + + Workspace Proxies + + )} {permissions.viewDeploymentValues && ( Security )} @@ -145,12 +142,14 @@ const DeploymentSettingsNavigation: FC = ({ {permissions.viewAllUsers && ( Users )} - - - Notifications - - - + {permissions.viewNotificationTemplate && ( + + + Notifications + + + + )} )}
@@ -167,7 +166,7 @@ interface OrganizationsSettingsNavigationProps { /** Organizations and their permissions or undefined if still fetching. */ organizations: OrganizationWithPermissions[] | undefined; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -241,8 +240,6 @@ interface OrganizationSettingsNavigationProps { const OrganizationSettingsNavigation: FC< OrganizationSettingsNavigationProps > = ({ active, organization }) => { - const { experiments } = useDashboard(); - return ( <> ({ cursor: "pointer", // We need to make the bar width at least 34px to allow the "..." icons to be displayed. // The calculation is border * 1 + side paddings * 2 + icon width (which is 18px) @@ -99,7 +99,7 @@ const styles = { "&:focus, &:hover, &:active": { outline: "none", - borderColor: "#38BDF8", + borderColor: theme.roles.active.outline, }, - }, + }), } satisfies Record>; diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx index 752e53c5b5c4a..00660c39f495c 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx @@ -52,16 +52,16 @@ const styles = { gap: spaceBetweenBlocks, alignItems: "center", }, - block: { + block: (theme) => ({ borderRadius: 4, height: 18, - backgroundColor: "#082F49", - border: "1px solid #38BDF8", + backgroundColor: theme.roles.active.background, + border: `1px solid ${theme.roles.active.outline}`, flexShrink: 0, flex: 1, - }, - more: { - color: "#38BDF8", + }), + more: (theme) => ({ + color: theme.roles.active.outline, lineHeight: 0, flexShrink: 0, flex: 1, @@ -69,5 +69,5 @@ const styles = { "& svg": { fontSize: moreIconSize, }, - }, + }), } satisfies Record>; diff --git a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx index b1c69b6d1baf7..3f1f7d761e748 
100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx @@ -32,33 +32,6 @@ import { } from "./Chart/utils"; import type { StageCategory } from "./StagesChart"; -const legendsByAction: Record = { - "state refresh": { - label: "state refresh", - }, - create: { - label: "create", - colors: { - fill: "#022C22", - stroke: "#BBF7D0", - }, - }, - delete: { - label: "delete", - colors: { - fill: "#422006", - stroke: "#FDBA74", - }, - }, - read: { - label: "read", - colors: { - fill: "#082F49", - stroke: "#38BDF8", - }, - }, -}; - type ResourceTiming = { name: string; source: string; @@ -86,6 +59,8 @@ export const ResourcesChart: FC = ({ const visibleTimings = timings.filter( (t) => !isCoderResource(t.name) && t.name.includes(filter), ); + const theme = useTheme(); + const legendsByAction = getLegendsByAction(theme); const visibleLegends = [...new Set(visibleTimings.map((t) => t.action))].map( (a) => legendsByAction[a], ); @@ -168,3 +143,32 @@ export const isCoderResource = (resource: string) => { resource.startsWith("coder_") ); }; + +function getLegendsByAction(theme: Theme): Record { + return { + "state refresh": { + label: "state refresh", + }, + create: { + label: "create", + colors: { + fill: theme.roles.success.background, + stroke: theme.roles.success.outline, + }, + }, + delete: { + label: "delete", + colors: { + fill: theme.roles.warning.background, + stroke: theme.roles.warning.outline, + }, + }, + read: { + label: "read", + colors: { + fill: theme.roles.active.background, + stroke: theme.roles.active.outline, + }, + }, + }; +} diff --git a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx index 5dfc57e51098f..64d97bff7cfdb 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx @@ -1,3 +1,4 @@ +import { type Theme, 
useTheme } from "@emotion/react"; import { type FC, useState } from "react"; import { Bar } from "./Chart/Bar"; import { @@ -28,30 +29,6 @@ import { } from "./Chart/utils"; import type { StageCategory } from "./StagesChart"; -const legendsByStatus: Record = { - ok: { - label: "success", - colors: { - fill: "#022C22", - stroke: "#BBF7D0", - }, - }, - exit_failure: { - label: "failure", - colors: { - fill: "#450A0A", - stroke: "#F87171", - }, - }, - timeout: { - label: "timed out", - colors: { - fill: "#422006", - stroke: "#FDBA74", - }, - }, -}; - type ScriptTiming = { name: string; status: string; @@ -77,6 +54,8 @@ export const ScriptsChart: FC = ({ const [ticks, scale] = makeTicks(totalTime); const [filter, setFilter] = useState(""); const visibleTimings = timings.filter((t) => t.name.includes(filter)); + const theme = useTheme(); + const legendsByStatus = getLegendsByStatus(theme); const visibleLegends = [...new Set(visibleTimings.map((t) => t.status))].map( (s) => legendsByStatus[s], ); @@ -151,3 +130,29 @@ export const ScriptsChart: FC = ({ ); }; + +function getLegendsByStatus(theme: Theme): Record { + return { + ok: { + label: "success", + colors: { + fill: theme.roles.success.background, + stroke: theme.roles.success.outline, + }, + }, + exit_failure: { + label: "failure", + colors: { + fill: theme.roles.error.background, + stroke: theme.roles.error.outline, + }, + }, + timeout: { + label: "timed out", + colors: { + fill: theme.roles.warning.background, + stroke: theme.roles.warning.outline, + }, + }, + }; +} diff --git a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx index 8f37605ce5956..dc5550dcfed98 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx @@ -102,7 +102,7 @@ export const stages: Stage[] = [ <> Terraform apply - Execute terraform plan to create/modify/delete resources into + Execute 
Terraform plan to create/modify/delete resources into desired states. diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx index b1bf487c52732..f546e271395ab 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx @@ -1,5 +1,6 @@ import type { Meta, StoryObj } from "@storybook/react"; import { expect, userEvent, waitFor, within } from "@storybook/test"; +import { chromatic } from "testHelpers/chromatic"; import { WorkspaceTimings } from "./WorkspaceTimings"; import { WorkspaceTimingsResponse } from "./storybookData"; @@ -11,6 +12,9 @@ const meta: Meta = { provisionerTimings: WorkspaceTimingsResponse.provisioner_timings, agentScriptTimings: WorkspaceTimingsResponse.agent_script_timings, }, + parameters: { + chromatic, + }, }; export default meta; diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx index 4835cc2be8f69..9e16e55bae36e 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx @@ -55,7 +55,7 @@ export const WorkspaceTimings: FC = ({ ) : ( )} - Provisioning time + Build timeline ({ marginLeft: "auto", diff --git a/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPage.tsx index 422cebc7edb93..27edefa229b2f 100644 --- a/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPage.tsx @@ -1,24 +1,19 @@ import { Loader } from "components/Loader/Loader"; -import { useManagementSettings } from 
"modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { ExternalAuthSettingsPageView } from "./ExternalAuthSettingsPageView"; const ExternalAuthSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("External Authentication Settings")} - - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx index 5d3879e195996..2b094cbf89b26 100644 --- a/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx @@ -1,9 +1,8 @@ import { deploymentDAUs } from "api/queries/deployment"; import { entitlements } from "api/queries/entitlements"; import { availableExperiments, experiments } from "api/queries/experiments"; -import { Loader } from "components/Loader/Loader"; import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; @@ -11,7 +10,7 @@ import { pageTitle } from "utils/page"; import { GeneralSettingsPageView } from "./GeneralSettingsPageView"; const GeneralSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const deploymentDAUsQuery = useQuery(deploymentDAUs()); const safeExperimentsQuery = 
useQuery(availableExperiments()); @@ -30,18 +29,14 @@ const GeneralSettingsPage: FC = () => { {pageTitle("General Settings")} - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx index 6ebd005f71031..ec77bb95e5241 100644 --- a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx @@ -1,24 +1,19 @@ import { Loader } from "components/Loader/Loader"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { NetworkSettingsPageView } from "./NetworkSettingsPageView"; const NetworkSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("Network Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx index c2e8479a26f8c..61a1eddcd1a78 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx @@ -14,7 +14,7 @@ const meta: Meta = { defaultMethod: "smtp", availableMethods: ["smtp", "webhook"], templatesByGroup: selectTemplatesByGroup(MockNotificationTemplates), - deploymentValues: baseMeta.parameters.deploymentValues, + deploymentConfig: baseMeta.parameters.deploymentValues, }, ...baseMeta, }; @@ -25,7 +25,7 @@ type Story = StoryObj; export const SMTPNotConfigured: Story = { args: { - deploymentValues: { + deploymentConfig: { notifications: { webhook: { endpoint: "https://example.com", @@ -40,7 +40,7 @@ export const SMTPNotConfigured: Story = { export const WebhookNotConfigured: Story = { args: { - deploymentValues: { + deploymentConfig: { notifications: { webhook: { endpoint: "", diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx index 191e2eda6958e..38c36fc52c044 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx @@ -31,20 +31,20 @@ type NotificationEventsProps = { defaultMethod: NotificationMethod; availableMethods: NotificationMethod[]; templatesByGroup: ReturnType; - deploymentValues: DeploymentValues; + deploymentConfig: DeploymentValues; }; export const NotificationEvents: FC = ({ defaultMethod, availableMethods, templatesByGroup, - deploymentValues, + deploymentConfig, }) => { // Webhook const hasWebhookNotifications = Object.values(templatesByGroup) .flat() .some((t) 
=> t.method === "webhook"); - const webhookValues = deploymentValues.notifications?.webhook ?? {}; + const webhookValues = deploymentConfig.notifications?.webhook ?? {}; const isWebhookConfigured = requiredFieldsArePresent(webhookValues, [ "endpoint", ]); @@ -53,7 +53,7 @@ export const NotificationEvents: FC = ({ const hasSMTPNotifications = Object.values(templatesByGroup) .flat() .some((t) => t.method === "smtp"); - const smtpValues = deploymentValues.notifications?.email ?? {}; + const smtpValues = deploymentConfig.notifications?.email ?? {}; const isSMTPConfigured = requiredFieldsArePresent(smtpValues, [ "smarthost", "from", diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx index d43c8c3a841a6..23f8e6b42651e 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx @@ -6,21 +6,20 @@ import { } from "api/queries/notifications"; import { Loader } from "components/Loader/Loader"; import { TabLink, Tabs, TabsList } from "components/Tabs/Tabs"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useSearchParamsKey } from "hooks/useSearchParamsKey"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import { castNotificationMethod } from "modules/notifications/utils"; import { Section } from "pages/UserSettingsPage/Section"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQueries } from "react-query"; -import { useSearchParams } from "react-router-dom"; import { deploymentGroupHasParent } from "utils/deployOptions"; import { pageTitle } from "utils/page"; import OptionsTable from "../OptionsTable"; import { NotificationEvents } from "./NotificationEvents"; export const NotificationsPage: FC = () => { - const 
[searchParams] = useSearchParams(); - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const [templatesByGroup, dispatchMethods] = useQueries({ queries: [ { @@ -30,10 +29,12 @@ export const NotificationsPage: FC = () => { notificationDispatchMethods(), ], }); - const ready = - templatesByGroup.data && dispatchMethods.data && deploymentValues; - const tab = searchParams.get("tab") || "events"; + const tabState = useSearchParamsKey({ + key: "tab", + defaultValue: "events", + }); + const ready = !!(templatesByGroup.data && dispatchMethods.data); return ( <> @@ -45,7 +46,7 @@ export const NotificationsPage: FC = () => { layout="fluid" featureStage={"beta"} > - + Events @@ -58,10 +59,10 @@ export const NotificationsPage: FC = () => {
{ready ? ( - tab === "events" ? ( + tabState.value === "events" ? ( { /> ) : ( + options={deploymentConfig.options.filter((o) => deploymentGroupHasParent(o.group, "Notifications"), )} /> diff --git a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx index 1ea1a2d19ef82..12b574c177384 100644 --- a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx @@ -1,14 +1,13 @@ -import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { ObservabilitySettingsPageView } from "./ObservabilitySettingsPageView"; const ObservabilitySettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const { entitlements } = useDashboard(); const { multiple_organizations: hasPremiumLicense } = useFeatureVisibility(); @@ -17,16 +16,11 @@ const ObservabilitySettingsPage: FC = () => { {pageTitle("Observability Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx index 2a296fc9d22d2..bda0988f01966 100644 --- a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx @@ -1,13 +1,13 @@ import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { SecuritySettingsPageView } from "./SecuritySettingsPageView"; const SecuritySettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const { entitlements } = useDashboard(); return ( @@ -15,15 +15,10 @@ const SecuritySettingsPage: FC = () => { {pageTitle("Security Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx index b6382f5a54f99..1511e29aca2d0 100644 --- a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx @@ -1,24 +1,19 @@ import { Loader } from "components/Loader/Loader"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { UserAuthSettingsPageView } from "./UserAuthSettingsPageView"; const UserAuthSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("User Authentication Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx b/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx index 80995a160b67d..e770a400af2a7 100644 --- a/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx +++ b/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx @@ -19,6 +19,7 @@ import CreateEditRolePageView from "./CreateEditRolePageView"; export const CreateEditRolePage: FC = () => { const queryClient = useQueryClient(); const navigate = useNavigate(); + const { organization: organizationName, roleName } = useParams() as { organization: string; roleName: string; diff --git a/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx b/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx index bd91c348e03ee..19387a28730eb 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx @@ -14,15 +14,10 @@ const OrganizationProvisionersPage: FC = () => { const { organization: organizationName } = useParams() as { organization: string; }; - const { organizations } = useManagementSettings(); + const { organization } = useManagementSettings(); const { entitlements } = useDashboard(); - const { metadata } = useEmbeddedMetadata(); const buildInfoQuery = useQuery(buildInfo(metadata["build-info"])); - - const organization = organizations - ? 
getOrganizationByName(organizations, organizationName) - : undefined; const provisionersQuery = useQuery(provisionerDaemonGroups(organizationName)); if (!organization) { @@ -40,8 +35,3 @@ const OrganizationProvisionersPage: FC = () => { }; export default OrganizationProvisionersPage; - -const getOrganizationByName = ( - organizations: readonly Organization[], - name: string, -) => organizations.find((org) => org.name === name); diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx index 9c85f89a62b55..f6b6b49c88d37 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx @@ -1,6 +1,11 @@ import type { Meta, StoryObj } from "@storybook/react"; import { reactRouterParameters } from "storybook-addon-remix-react-router"; -import { MockDefaultOrganization, MockUser } from "testHelpers/entities"; +import { + MockDefaultOrganization, + MockOrganization, + MockOrganization2, + MockUser, +} from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider, diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx index 1d11c85a605ae..2b4eb18a9a524 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx @@ -35,10 +35,7 @@ const OrganizationSettingsPage: FC = () => { deleteOrganization(queryClient), ); - const organization = - organizations && organizationName - ? 
getOrganizationByName(organizations, organizationName) - : undefined; + const organization = organizations?.find((o) => o.name === organizationName); const permissionsQuery = useQuery( organizationsPermissions(organizations?.map((o) => o.id)), ); @@ -55,13 +52,10 @@ const OrganizationSettingsPage: FC = () => { // Redirect /organizations => /organizations/default-org, or if they cannot edit // the default org, then the first org they can edit, if any. if (!organizationName) { + // .find will stop at the first match found; make sure default + // organizations are placed first const editableOrg = [...organizations] - .sort((a, b) => { - // Prefer default org (it may not be first). - // JavaScript will happily subtract booleans, but use numbers to keep - // the compiler happy. - return (b.is_default ? 1 : 0) - (a.is_default ? 1 : 0); - }) + .sort((a, b) => (b.is_default ? 1 : 0) - (a.is_default ? 1 : 0)) .find((org) => canEditOrganization(permissions[org.id])); if (editableOrg) { return ; @@ -111,10 +105,3 @@ const OrganizationSettingsPage: FC = () => { }; export default OrganizationSettingsPage; - -const getOrganizationByName = ( - organizations: readonly Organization[], - name: string, -) => { - return organizations.find((org) => org.name === name); -}; diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx index 9983c25080a59..3e8b1ad3133b7 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx @@ -4,7 +4,6 @@ import { MockDefaultOrganization, MockOrganization, } from "testHelpers/entities"; -import { withManagementSettingsProvider } from "testHelpers/storybook"; import { OrganizationSettingsPageView } from "./OrganizationSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/SetupPage/countries.tsx 
b/site/src/pages/SetupPage/countries.tsx index 0fcebc25ac54d..9b13b6b6be0d9 100644 --- a/site/src/pages/SetupPage/countries.tsx +++ b/site/src/pages/SetupPage/countries.tsx @@ -964,7 +964,7 @@ export const countries = [ flag: "🇻🇪", }, { - name: "Viet Nam", + name: "Vietnam", flag: "🇻🇳", }, { diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx index ef7c72895552b..d95cfc3d60daf 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx @@ -320,3 +320,39 @@ export const TemplateDoesNotAllowAutostop: Story = { }, }, }; + +export const TemplateInfoPopover: Story = { + play: async ({ canvasElement, step }) => { + const canvas = within(canvasElement); + + await step("activate hover trigger", async () => { + await userEvent.hover(canvas.getByText(baseWorkspace.name)); + await waitFor(() => + expect( + canvas.getByRole("presentation", { hidden: true }), + ).toHaveTextContent(MockTemplate.display_name), + ); + }); + }, +}; + +export const TemplateInfoPopoverWithoutDisplayName: Story = { + args: { + workspace: { + ...baseWorkspace, + template_display_name: "", + }, + }, + play: async ({ canvasElement, step }) => { + const canvas = within(canvasElement); + + await step("activate hover trigger", async () => { + await userEvent.hover(canvas.getByText(baseWorkspace.name)); + await waitFor(() => + expect( + canvas.getByRole("presentation", { hidden: true }), + ).toHaveTextContent(MockTemplate.name), + ); + }); + }, +}; diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx index e3be26462cc5b..7ca112befb4e5 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx @@ -160,7 +160,9 @@ export const WorkspaceTopbar: FC = ({ templateIconUrl={workspace.template_icon} rootTemplateUrl={templateLink} 
templateVersionName={workspace.latest_build.template_version_name} - templateVersionDisplayName={workspace.template_display_name} + templateDisplayName={ + workspace.template_display_name || workspace.template_name + } latestBuildVersionName={ workspace.latest_build.template_version_name } @@ -366,7 +368,7 @@ type WorkspaceBreadcrumbProps = Readonly<{ rootTemplateUrl: string; templateVersionName: string; latestBuildVersionName: string; - templateVersionDisplayName?: string; + templateDisplayName: string; }>; const WorkspaceBreadcrumb: FC = ({ @@ -375,7 +377,7 @@ const WorkspaceBreadcrumb: FC = ({ rootTemplateUrl, templateVersionName, latestBuildVersionName, - templateVersionDisplayName = templateVersionName, + templateDisplayName, }) => { return ( @@ -399,7 +401,7 @@ const WorkspaceBreadcrumb: FC = ({ to={rootTemplateUrl} css={{ color: "inherit" }} > - {templateVersionDisplayName} + {templateDisplayName} } subtitle={ @@ -419,7 +421,7 @@ const WorkspaceBreadcrumb: FC = ({ fitImage /> } - imgFallbackText={templateVersionDisplayName} + imgFallbackText={templateDisplayName} /> diff --git a/site/src/router.tsx b/site/src/router.tsx index 2531c823b9f48..c9d8736979c34 100644 --- a/site/src/router.tsx +++ b/site/src/router.tsx @@ -10,7 +10,6 @@ import { import { Loader } from "./components/Loader/Loader"; import { RequireAuth } from "./contexts/auth/RequireAuth"; import { DashboardLayout } from "./modules/dashboard/DashboardLayout"; -import { ManagementSettingsLayout } from "./modules/management/ManagementSettingsLayout"; import AuditPage from "./pages/AuditPage/AuditPage"; import { HealthLayout } from "./pages/HealthPage/HealthLayout"; import LoginPage from "./pages/LoginPage/LoginPage"; @@ -28,6 +27,12 @@ import WorkspacesPage from "./pages/WorkspacesPage/WorkspacesPage"; // - Pages that are secondary, not in the main navigation or not usually accessed // - Pages that use heavy dependencies like charts or time libraries const NotFoundPage = lazy(() => 
import("./pages/404Page/404Page")); +const ManagementSettingsLayout = lazy( + () => import("./modules/management/ManagementSettingsLayout"), +); +const DeploymentSettingsProvider = lazy( + () => import("./modules/management/DeploymentSettingsProvider"), +); const CliAuthenticationPage = lazy( () => import("./pages/CliAuthPage/CliAuthPage"), ); @@ -427,22 +432,32 @@ export const router = createBrowserRouter( }> - } /> - } /> - } /> - } /> - } - /> - } /> - } /> - } /> - } - /> + }> + } /> + } /> + } + /> + } /> + } /> + } + /> + + } + /> + + + } /> + } /> + + } /> + } /> } /> @@ -452,14 +467,9 @@ export const router = createBrowserRouter( - } /> } /> } /> {groupsRouter()} - } - /> }> diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 0db6e80d435d6..1593790e9792d 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -2766,12 +2766,37 @@ export const MockPermissions: Permissions = { viewUpdateCheck: true, viewDeploymentStats: true, viewExternalAuthConfig: true, + readWorkspaceProxies: true, editWorkspaceProxies: true, createOrganization: true, editAnyOrganization: true, viewAnyGroup: true, createGroup: true, viewAllLicenses: true, + viewNotificationTemplate: true, +}; + +export const MockNoPermissions: Permissions = { + createTemplates: false, + createUser: false, + deleteTemplates: false, + updateTemplates: false, + viewAllUsers: false, + updateUsers: false, + viewAnyAuditLog: false, + viewDeploymentValues: false, + editDeploymentValues: false, + viewUpdateCheck: false, + viewDeploymentStats: false, + viewExternalAuthConfig: false, + readWorkspaceProxies: false, + editWorkspaceProxies: false, + createOrganization: false, + editAnyOrganization: false, + viewAnyGroup: false, + createGroup: false, + viewAllLicenses: false, + viewNotificationTemplate: false, }; export const MockDeploymentConfig: DeploymentConfig = { diff --git a/site/src/testHelpers/renderHelpers.tsx 
b/site/src/testHelpers/renderHelpers.tsx index f093adb1cfb4a..46ae893927801 100644 --- a/site/src/testHelpers/renderHelpers.tsx +++ b/site/src/testHelpers/renderHelpers.tsx @@ -9,7 +9,7 @@ import { ThemeProvider } from "contexts/ThemeProvider"; import { RequireAuth } from "contexts/auth/RequireAuth"; import { DashboardLayout } from "modules/dashboard/DashboardLayout"; import type { DashboardProvider } from "modules/dashboard/DashboardProvider"; -import { ManagementSettingsLayout } from "modules/management/ManagementSettingsLayout"; +import ManagementSettingsLayout from "modules/management/ManagementSettingsLayout"; import { TemplateSettingsLayout } from "pages/TemplateSettingsPage/TemplateSettingsLayout"; import { WorkspaceSettingsLayout } from "pages/WorkspaceSettingsPage/WorkspaceSettingsLayout"; import type { ReactNode } from "react"; diff --git a/site/src/testHelpers/storybook.tsx b/site/src/testHelpers/storybook.tsx index a76e1230205fc..e905a9b412c2c 100644 --- a/site/src/testHelpers/storybook.tsx +++ b/site/src/testHelpers/storybook.tsx @@ -7,6 +7,7 @@ import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; import { AuthProvider } from "contexts/auth/AuthProvider"; import { permissionsToCheck } from "contexts/auth/permissions"; import { DashboardContext } from "modules/dashboard/DashboardProvider"; +import { DeploymentSettingsContext } from "modules/management/DeploymentSettingsProvider"; import { ManagementSettingsContext } from "modules/management/ManagementSettingsLayout"; import type { FC } from "react"; import { useQueryClient } from "react-query"; @@ -131,12 +132,15 @@ export const withManagementSettingsProvider = (Story: FC) => { return ( - + + + ); }; diff --git a/tailnet/resume.go b/tailnet/resume.go index b9443064a37f9..2975fa35f1674 100644 --- a/tailnet/resume.go +++ b/tailnet/resume.go @@ -3,32 +3,23 @@ package tailnet import ( "context" "crypto/rand" - "database/sql" - "encoding/hex" - "encoding/json" "time" - 
"github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "golang.org/x/xerrors" "google.golang.org/protobuf/types/known/durationpb" "google.golang.org/protobuf/types/known/timestamppb" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/tailnet/proto" "github.com/coder/quartz" ) const ( DefaultResumeTokenExpiry = 24 * time.Hour - - resumeTokenSigningAlgorithm = jose.HS512 ) -// resumeTokenSigningKeyID is a fixed key ID for the resume token signing key. -// If/when we add support for multiple keys (e.g. key rotation), this will move -// to the database instead. -var resumeTokenSigningKeyID = uuid.MustParse("97166747-9309-4d7f-9071-a230e257c2a4") - // NewInsecureTestResumeTokenProvider returns a ResumeTokenProvider that uses a // random key with short expiry for testing purposes. If any errors occur while // generating the key, the function panics. @@ -37,12 +28,15 @@ func NewInsecureTestResumeTokenProvider() ResumeTokenProvider { if err != nil { panic(err) } - return NewResumeTokenKeyProvider(key, quartz.NewReal(), time.Hour) + return NewResumeTokenKeyProvider(jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: key[:], + }, quartz.NewReal(), time.Hour) } type ResumeTokenProvider interface { - GenerateResumeToken(peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) - VerifyResumeToken(token string) (uuid.UUID, error) + GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) + VerifyResumeToken(ctx context.Context, token string) (uuid.UUID, error) } type ResumeTokenSigningKey [64]byte @@ -56,104 +50,37 @@ func GenerateResumeTokenSigningKey() (ResumeTokenSigningKey, error) { return key, nil } -type ResumeTokenSigningKeyDatabaseStore interface { - GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) - UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, key string) error -} - -// ResumeTokenSigningKeyFromDatabase retrieves the 
coordinator resume token -// signing key from the database. If the key is not found, a new key is -// generated and inserted into the database. -func ResumeTokenSigningKeyFromDatabase(ctx context.Context, db ResumeTokenSigningKeyDatabaseStore) (ResumeTokenSigningKey, error) { - var resumeTokenKey ResumeTokenSigningKey - resumeTokenKeyStr, err := db.GetCoordinatorResumeTokenSigningKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return resumeTokenKey, xerrors.Errorf("get coordinator resume token key: %w", err) - } - if decoded, err := hex.DecodeString(resumeTokenKeyStr); err != nil || len(decoded) != len(resumeTokenKey) { - newKey, err := GenerateResumeTokenSigningKey() - if err != nil { - return resumeTokenKey, xerrors.Errorf("generate fresh coordinator resume token key: %w", err) - } - - resumeTokenKeyStr = hex.EncodeToString(newKey[:]) - err = db.UpsertCoordinatorResumeTokenSigningKey(ctx, resumeTokenKeyStr) - if err != nil { - return resumeTokenKey, xerrors.Errorf("insert freshly generated coordinator resume token key to database: %w", err) - } - } - - resumeTokenKeyBytes, err := hex.DecodeString(resumeTokenKeyStr) - if err != nil { - return resumeTokenKey, xerrors.Errorf("decode coordinator resume token key from database: %w", err) - } - if len(resumeTokenKeyBytes) != len(resumeTokenKey) { - return resumeTokenKey, xerrors.Errorf("coordinator resume token key in database is not the correct length, expect %d got %d", len(resumeTokenKey), len(resumeTokenKeyBytes)) - } - copy(resumeTokenKey[:], resumeTokenKeyBytes) - if resumeTokenKey == [64]byte{} { - return resumeTokenKey, xerrors.Errorf("coordinator resume token key in database is empty") - } - return resumeTokenKey, nil -} - type ResumeTokenKeyProvider struct { - key ResumeTokenSigningKey + key jwtutils.SigningKeyManager clock quartz.Clock expiry time.Duration } -func NewResumeTokenKeyProvider(key ResumeTokenSigningKey, clock quartz.Clock, expiry time.Duration) ResumeTokenProvider { +func 
NewResumeTokenKeyProvider(key jwtutils.SigningKeyManager, clock quartz.Clock, expiry time.Duration) ResumeTokenProvider { if expiry <= 0 { expiry = DefaultResumeTokenExpiry } return ResumeTokenKeyProvider{ key: key, clock: clock, - expiry: DefaultResumeTokenExpiry, + expiry: expiry, } } -type resumeTokenPayload struct { - PeerID uuid.UUID `json:"sub"` - Expiry int64 `json:"exp"` -} - -func (p ResumeTokenKeyProvider) GenerateResumeToken(peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) { +func (p ResumeTokenKeyProvider) GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) { exp := p.clock.Now().Add(p.expiry) - payload := resumeTokenPayload{ - PeerID: peerID, - Expiry: exp.Unix(), - } - payloadBytes, err := json.Marshal(payload) - if err != nil { - return nil, xerrors.Errorf("marshal payload to JSON: %w", err) - } - - signer, err := jose.NewSigner(jose.SigningKey{ - Algorithm: resumeTokenSigningAlgorithm, - Key: p.key[:], - }, &jose.SignerOptions{ - ExtraHeaders: map[jose.HeaderKey]interface{}{ - "kid": resumeTokenSigningKeyID.String(), - }, - }) - if err != nil { - return nil, xerrors.Errorf("create signer: %w", err) + payload := jwtutils.RegisteredClaims{ + Subject: peerID.String(), + Expiry: jwt.NewNumericDate(exp), } - signedObject, err := signer.Sign(payloadBytes) + token, err := jwtutils.Sign(ctx, p.key, payload) if err != nil { return nil, xerrors.Errorf("sign payload: %w", err) } - serialized, err := signedObject.CompactSerialize() - if err != nil { - return nil, xerrors.Errorf("serialize JWS: %w", err) - } - return &proto.RefreshResumeTokenResponse{ - Token: serialized, + Token: token, RefreshIn: durationpb.New(p.expiry / 2), ExpiresAt: timestamppb.New(exp), }, nil @@ -162,35 +89,17 @@ func (p ResumeTokenKeyProvider) GenerateResumeToken(peerID uuid.UUID) (*proto.Re // VerifyResumeToken parses a signed tailnet resume token with the given key and // returns the payload. 
If the token is invalid or expired, an error is // returned. -func (p ResumeTokenKeyProvider) VerifyResumeToken(str string) (uuid.UUID, error) { - object, err := jose.ParseSigned(str) - if err != nil { - return uuid.Nil, xerrors.Errorf("parse JWS: %w", err) - } - if len(object.Signatures) != 1 { - return uuid.Nil, xerrors.New("expected 1 signature") - } - if object.Signatures[0].Header.Algorithm != string(resumeTokenSigningAlgorithm) { - return uuid.Nil, xerrors.Errorf("expected token signing algorithm to be %q, got %q", resumeTokenSigningAlgorithm, object.Signatures[0].Header.Algorithm) - } - if object.Signatures[0].Header.KeyID != resumeTokenSigningKeyID.String() { - return uuid.Nil, xerrors.Errorf("expected token key ID to be %q, got %q", resumeTokenSigningKeyID, object.Signatures[0].Header.KeyID) - } - - output, err := object.Verify(p.key[:]) +func (p ResumeTokenKeyProvider) VerifyResumeToken(ctx context.Context, str string) (uuid.UUID, error) { + var tok jwt.Claims + err := jwtutils.Verify(ctx, p.key, str, &tok, jwtutils.WithVerifyExpected(jwt.Expected{ + Time: p.clock.Now(), + })) if err != nil { - return uuid.Nil, xerrors.Errorf("verify JWS: %w", err) + return uuid.Nil, xerrors.Errorf("verify payload: %w", err) } - - var tok resumeTokenPayload - err = json.Unmarshal(output, &tok) + parsed, err := uuid.Parse(tok.Subject) if err != nil { - return uuid.Nil, xerrors.Errorf("unmarshal payload: %w", err) + return uuid.Nil, xerrors.Errorf("parse peerID from token: %w", err) } - exp := time.Unix(tok.Expiry, 0) - if exp.Before(p.clock.Now()) { - return uuid.Nil, xerrors.New("signed resume token expired") - } - - return tok.PeerID, nil + return parsed, nil } diff --git a/tailnet/resume_test.go b/tailnet/resume_test.go index 3f63887cbfef3..6f32fba4c511e 100644 --- a/tailnet/resume_test.go +++ b/tailnet/resume_test.go @@ -1,117 +1,20 @@ package tailnet_test import ( - "context" - "encoding/hex" "testing" "time" + "github.com/go-jose/go-jose/v4" + 
"github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" - "github.com/coder/coder/v2/coderd/database/dbmock" - "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/testutil" "github.com/coder/quartz" ) -func TestResumeTokenSigningKeyFromDatabase(t *testing.T) { - t.Parallel() - - assertRandomKey := func(t *testing.T, key tailnet.ResumeTokenSigningKey) { - t.Helper() - assert.NotEqual(t, tailnet.ResumeTokenSigningKey{}, key, "key should not be empty") - assert.NotEqualValues(t, [64]byte{1}, key, "key should not be all 1s") - } - - t.Run("GenerateRetrieve", func(t *testing.T) { - t.Parallel() - - db, _ := dbtestutil.NewDB(t) - ctx := testutil.Context(t, testutil.WaitShort) - key1, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key1) - - key2, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - require.Equal(t, key1, key2, "keys should not be different") - }) - - t.Run("GetError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("", assert.AnError) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorIs(t, err, assert.AnError) - }) - - t.Run("UpsertError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("", nil) - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Return(assert.AnError) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorIs(t, err, 
assert.AnError) - }) - - t.Run("DecodeErrorShouldRegenerate", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("invalid", nil) - - var storedKey tailnet.ResumeTokenSigningKey - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Do(func(_ context.Context, value string) error { - keyBytes, err := hex.DecodeString(value) - require.NoError(t, err) - require.Len(t, keyBytes, len(storedKey)) - copy(storedKey[:], keyBytes) - return nil - }) - - ctx := testutil.Context(t, testutil.WaitShort) - key, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key) - require.Equal(t, storedKey, key, "key should match stored value") - }) - - t.Run("LengthErrorShouldRegenerate", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("deadbeef", nil) - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Return(nil) - - ctx := testutil.Context(t, testutil.WaitShort) - key, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key) - }) - - t.Run("EmptyError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - emptyKey := hex.EncodeToString(make([]byte, 64)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return(emptyKey, nil) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorContains(t, err, "is empty") - }) -} - func TestResumeTokenKeyProvider(t *testing.T) { t.Parallel() @@ -121,17 +24,18 @@ func TestResumeTokenKeyProvider(t *testing.T) { t.Run("OK", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) id := uuid.New() clock := quartz.NewMock(t) - 
provider := tailnet.NewResumeTokenKeyProvider(key, clock, tailnet.DefaultResumeTokenExpiry) - token, err := provider.GenerateResumeToken(id) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), clock, tailnet.DefaultResumeTokenExpiry) + token, err := provider.GenerateResumeToken(ctx, id) require.NoError(t, err) require.NotNil(t, token) require.NotEmpty(t, token.Token) require.Equal(t, tailnet.DefaultResumeTokenExpiry/2, token.RefreshIn.AsDuration()) require.WithinDuration(t, clock.Now().Add(tailnet.DefaultResumeTokenExpiry), token.ExpiresAt.AsTime(), time.Second) - gotID, err := provider.VerifyResumeToken(token.Token) + gotID, err := provider.VerifyResumeToken(ctx, token.Token) require.NoError(t, err) require.Equal(t, id, gotID) }) @@ -139,43 +43,57 @@ func TestResumeTokenKeyProvider(t *testing.T) { t.Run("Expired", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) id := uuid.New() clock := quartz.NewMock(t) - provider := tailnet.NewResumeTokenKeyProvider(key, clock, tailnet.DefaultResumeTokenExpiry) - token, err := provider.GenerateResumeToken(id) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), clock, tailnet.DefaultResumeTokenExpiry) + token, err := provider.GenerateResumeToken(ctx, id) require.NoError(t, err) require.NotNil(t, token) require.NotEmpty(t, token.Token) require.Equal(t, tailnet.DefaultResumeTokenExpiry/2, token.RefreshIn.AsDuration()) require.WithinDuration(t, clock.Now().Add(tailnet.DefaultResumeTokenExpiry), token.ExpiresAt.AsTime(), time.Second) - // Advance time past expiry - _ = clock.Advance(tailnet.DefaultResumeTokenExpiry + time.Second) + // Advance time past expiry. Account for leeway. 
+ _ = clock.Advance(tailnet.DefaultResumeTokenExpiry + time.Second*61) - _, err = provider.VerifyResumeToken(token.Token) - require.ErrorContains(t, err, "expired") + _, err = provider.VerifyResumeToken(ctx, token.Token) + require.Error(t, err) + require.ErrorIs(t, err, jwt.ErrExpired) }) t.Run("InvalidToken", func(t *testing.T) { t.Parallel() - provider := tailnet.NewResumeTokenKeyProvider(key, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - _, err := provider.VerifyResumeToken("invalid") + ctx := testutil.Context(t, testutil.WaitShort) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + _, err := provider.VerifyResumeToken(ctx, "invalid") require.ErrorContains(t, err, "parse JWS") }) t.Run("VerifyError", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) // Generate a resume token with a different key otherKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - otherProvider := tailnet.NewResumeTokenKeyProvider(otherKey, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - token, err := otherProvider.GenerateResumeToken(uuid.New()) + otherSigner := newKeySigner(otherKey) + otherProvider := tailnet.NewResumeTokenKeyProvider(otherSigner, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + token, err := otherProvider.GenerateResumeToken(ctx, uuid.New()) require.NoError(t, err) - provider := tailnet.NewResumeTokenKeyProvider(key, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - _, err = provider.VerifyResumeToken(token.Token) - require.ErrorContains(t, err, "verify JWS") + signer := newKeySigner(key) + signer.ID = otherSigner.ID + provider := tailnet.NewResumeTokenKeyProvider(signer, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + _, err = provider.VerifyResumeToken(ctx, token.Token) + require.ErrorIs(t, err, jose.ErrCryptoFailure) }) } + +func newKeySigner(key tailnet.ResumeTokenSigningKey) jwtutils.StaticKey { + return 
jwtutils.StaticKey{ + ID: "123", + Key: key[:], + } +} diff --git a/tailnet/service.go b/tailnet/service.go index 28a054dd8d671..7f38f63a589b3 100644 --- a/tailnet/service.go +++ b/tailnet/service.go @@ -177,7 +177,7 @@ func (s *DRPCService) RefreshResumeToken(ctx context.Context, _ *proto.RefreshRe return nil, xerrors.New("no Stream ID") } - res, err := s.ResumeTokenProvider.GenerateResumeToken(streamID.ID) + res, err := s.ResumeTokenProvider.GenerateResumeToken(ctx, streamID.ID) if err != nil { return nil, xerrors.Errorf("generate resume token: %w", err) }