-
+
-
+
@@ -40,14 +41,14 @@
- Onboard developers in seconds instead of days
-
+
## Quickstart
The most convenient way to try Coder is to install it on your local machine and experiment with provisioning cloud development environments using Docker (works on Linux, macOS, and Windows).
-```
+```shell
# First, install Coder
curl -L https://coder.com/install.sh | sh
@@ -65,7 +66,7 @@ The easiest way to install Coder is to use our
and macOS. For Windows, use the latest `..._installer.exe` file from GitHub
Releases.
-```bash
+```shell
curl -L https://coder.com/install.sh | sh
```
diff --git a/SECURITY.md b/SECURITY.md
index ee5ac8075eaf9..04be6e417548b 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -8,7 +8,7 @@ to us, what we expect, what you can expect from us.
You can see the pretty version [here](https://coder.com/security/policy)
-# Why Coder's security matters
+## Why Coder's security matters
If an attacker could fully compromise a Coder installation, they could spin up
expensive workstations, steal valuable credentials, or steal proprietary source
@@ -16,13 +16,13 @@ code. We take this risk very seriously and employ routine pen testing,
vulnerability scanning, and code reviews. We also welcome the contributions from
the community that helped make this product possible.
-# Where should I report security issues?
+## Where should I report security issues?
-Please report security issues to security@coder.com, providing all relevant
+Please report security issues to security@coder.com, providing all relevant
information. The more details you provide, the easier it will be for us to
triage and fix the issue.
-# Out of Scope
+## Out of Scope
Our primary concern is around an abuse of the Coder application that allows an
attacker to gain access to another users workspace, or spin up unwanted
@@ -40,7 +40,7 @@ workspaces.
out-of-scope systems should be reported to the appropriate vendor or
applicable authority.
-# Our Commitments
+## Our Commitments
When working with us, according to this policy, you can expect us to:
@@ -53,7 +53,7 @@ When working with us, according to this policy, you can expect us to:
- Extend Safe Harbor for your vulnerability research that is related to this
policy.
-# Our Expectations
+## Our Expectations
In participating in our vulnerability disclosure program in good faith, we ask
that you:
diff --git a/agent/agent_test.go b/agent/agent_test.go
index f1dfcd8c42a02..7674c906ff486 100644
--- a/agent/agent_test.go
+++ b/agent/agent_test.go
@@ -58,7 +58,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
// NOTE: These tests only work when your default shell is bash for some reason.
diff --git a/agent/agentscripts/agentscripts_test.go b/agent/agentscripts/agentscripts_test.go
index 572f7b509d4d2..0d6e41772cdb7 100644
--- a/agent/agentscripts/agentscripts_test.go
+++ b/agent/agentscripts/agentscripts_test.go
@@ -24,7 +24,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestExecuteBasic(t *testing.T) {
diff --git a/agent/agentssh/agentssh_test.go b/agent/agentssh/agentssh_test.go
index dfe67290c358b..76321e6e19d85 100644
--- a/agent/agentssh/agentssh_test.go
+++ b/agent/agentssh/agentssh_test.go
@@ -29,7 +29,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestNewServer_ServeClient(t *testing.T) {
diff --git a/cli/clitest/clitest_test.go b/cli/clitest/clitest_test.go
index db31513d182c7..c2149813875dc 100644
--- a/cli/clitest/clitest_test.go
+++ b/cli/clitest/clitest_test.go
@@ -8,10 +8,11 @@ import (
"github.com/coder/coder/v2/cli/clitest"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/pty/ptytest"
+ "github.com/coder/coder/v2/testutil"
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestCli(t *testing.T) {
diff --git a/cli/exp_scaletest_test.go b/cli/exp_scaletest_test.go
index 27f1adaac6c7d..afcd213fc9d00 100644
--- a/cli/exp_scaletest_test.go
+++ b/cli/exp_scaletest_test.go
@@ -18,6 +18,10 @@ import (
func TestScaleTestCreateWorkspaces(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
// This test only validates that the CLI command accepts known arguments.
// More thorough testing is done in scaletest/createworkspaces/run_test.go.
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong)
@@ -65,6 +69,10 @@ func TestScaleTestCreateWorkspaces(t *testing.T) {
func TestScaleTestWorkspaceTraffic(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitMedium)
defer cancelFunc()
@@ -95,6 +103,10 @@ func TestScaleTestWorkspaceTraffic(t *testing.T) {
func TestScaleTestWorkspaceTraffic_Template(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitMedium)
defer cancelFunc()
@@ -120,6 +132,10 @@ func TestScaleTestWorkspaceTraffic_Template(t *testing.T) {
func TestScaleTestWorkspaceTraffic_TargetWorkspaces(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitMedium)
defer cancelFunc()
@@ -145,6 +161,10 @@ func TestScaleTestWorkspaceTraffic_TargetWorkspaces(t *testing.T) {
func TestScaleTestCleanup_Template(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitMedium)
defer cancelFunc()
@@ -169,6 +189,10 @@ func TestScaleTestCleanup_Template(t *testing.T) {
// This test just validates that the CLI command accepts its known arguments.
func TestScaleTestDashboard(t *testing.T) {
t.Parallel()
+ if testutil.RaceEnabled() {
+ t.Skip("Skipping due to race detector")
+ }
+
t.Run("MinWait", func(t *testing.T) {
t.Parallel()
ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitShort)
diff --git a/cli/resetpassword.go b/cli/resetpassword.go
index 2aacc8a6e6c44..f77ed81d14db4 100644
--- a/cli/resetpassword.go
+++ b/cli/resetpassword.go
@@ -3,22 +3,27 @@
package cli
import (
- "database/sql"
"fmt"
"golang.org/x/xerrors"
+ "cdr.dev/slog"
+ "cdr.dev/slog/sloggers/sloghuman"
+ "github.com/coder/coder/v2/coderd/database/awsiamrds"
+ "github.com/coder/coder/v2/codersdk"
"github.com/coder/pretty"
"github.com/coder/serpent"
"github.com/coder/coder/v2/cli/cliui"
"github.com/coder/coder/v2/coderd/database"
- "github.com/coder/coder/v2/coderd/database/migrations"
"github.com/coder/coder/v2/coderd/userpassword"
)
func (*RootCmd) resetPassword() *serpent.Command {
- var postgresURL string
+ var (
+ postgresURL string
+ postgresAuth string
+ )
root := &serpent.Command{
Use:   "reset-password <username>",
@@ -27,20 +32,26 @@ func (*RootCmd) resetPassword() *serpent.Command {
Handler: func(inv *serpent.Invocation) error {
username := inv.Args[0]
- sqlDB, err := sql.Open("postgres", postgresURL)
- if err != nil {
- return xerrors.Errorf("dial postgres: %w", err)
+ logger := slog.Make(sloghuman.Sink(inv.Stdout))
+ if ok, _ := inv.ParsedFlags().GetBool("verbose"); ok {
+ logger = logger.Leveled(slog.LevelDebug)
}
- defer sqlDB.Close()
- err = sqlDB.Ping()
- if err != nil {
- return xerrors.Errorf("ping postgres: %w", err)
+
+ sqlDriver := "postgres"
+ if codersdk.PostgresAuth(postgresAuth) == codersdk.PostgresAuthAWSIAMRDS {
+ var err error
+ sqlDriver, err = awsiamrds.Register(inv.Context(), sqlDriver)
+ if err != nil {
+ return xerrors.Errorf("register aws rds iam auth: %w", err)
+ }
}
- err = migrations.EnsureClean(sqlDB)
+ sqlDB, err := ConnectToPostgres(inv.Context(), logger, sqlDriver, postgresURL, nil)
if err != nil {
- return xerrors.Errorf("database needs migration: %w", err)
+ return xerrors.Errorf("dial postgres: %w", err)
}
+ defer sqlDB.Close()
+
db := database.New(sqlDB)
user, err := db.GetUserByEmailOrUsername(inv.Context(), database.GetUserByEmailOrUsernameParams{
@@ -97,6 +108,14 @@ func (*RootCmd) resetPassword() *serpent.Command {
Env: "CODER_PG_CONNECTION_URL",
Value: serpent.StringOf(&postgresURL),
},
+ serpent.Option{
+ Name: "Postgres Connection Auth",
+ Description: "Type of auth to use when connecting to postgres.",
+ Flag: "postgres-connection-auth",
+ Env: "CODER_PG_CONNECTION_AUTH",
+ Default: "password",
+ Value: serpent.EnumOf(&postgresAuth, codersdk.PostgresAuthDrivers...),
+ },
}
return root
diff --git a/cli/root_internal_test.go b/cli/root_internal_test.go
index c10c853769900..f95ab04c1c9ec 100644
--- a/cli/root_internal_test.go
+++ b/cli/root_internal_test.go
@@ -19,6 +19,7 @@ import (
"github.com/coder/coder/v2/cli/cliui"
"github.com/coder/coder/v2/cli/telemetry"
"github.com/coder/coder/v2/codersdk"
+ "github.com/coder/coder/v2/testutil"
"github.com/coder/pretty"
"github.com/coder/serpent"
)
@@ -29,15 +30,7 @@ func TestMain(m *testing.M) {
// See: https://github.com/coder/coder/issues/8954
os.Exit(m.Run())
}
- goleak.VerifyTestMain(m,
- // The lumberjack library is used by by agent and seems to leave
- // goroutines after Close(), fails TestGitSSH tests.
- // https://github.com/natefinch/lumberjack/pull/100
- goleak.IgnoreTopFunction("gopkg.in/natefinch/lumberjack%2ev2.(*Logger).millRun"),
- goleak.IgnoreTopFunction("gopkg.in/natefinch/lumberjack%2ev2.(*Logger).mill.func1"),
- // The pq library appears to leave around a goroutine after Close().
- goleak.IgnoreTopFunction("github.com/lib/pq.NewDialListener"),
- )
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func Test_formatExamples(t *testing.T) {
diff --git a/cli/server.go b/cli/server.go
index ff8b2963e0eb4..9bb4cfb0a72f2 100644
--- a/cli/server.go
+++ b/cli/server.go
@@ -697,7 +697,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
options.Database = dbmem.New()
options.Pubsub = pubsub.NewInMemory()
} else {
- sqlDB, dbURL, err := getPostgresDB(ctx, logger, vals.PostgresURL.String(), codersdk.PostgresAuth(vals.PostgresAuth), sqlDriver)
+ sqlDB, dbURL, err := getAndMigratePostgresDB(ctx, logger, vals.PostgresURL.String(), codersdk.PostgresAuth(vals.PostgresAuth), sqlDriver)
if err != nil {
return xerrors.Errorf("connect to postgres: %w", err)
}
@@ -2090,9 +2090,18 @@ func IsLocalhost(host string) bool {
return host == "localhost" || host == "127.0.0.1" || host == "::1"
}
-func ConnectToPostgres(ctx context.Context, logger slog.Logger, driver string, dbURL string) (sqlDB *sql.DB, err error) {
+// ConnectToPostgres takes in the migration command to run on the database once
+// it connects. To avoid running migrations, pass in `nil` or a no-op function.
+// Regardless of the passed in migration function, if the database is not fully
+// migrated, an error will be returned. This can happen if the database is on a
+// future or past migration version.
+//
+// If no error is returned, the database is fully migrated and up to date.
+func ConnectToPostgres(ctx context.Context, logger slog.Logger, driver string, dbURL string, migrate func(db *sql.DB) error) (*sql.DB, error) {
logger.Debug(ctx, "connecting to postgresql")
+ var err error
+ var sqlDB *sql.DB
// Try to connect for 30 seconds.
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
@@ -2155,9 +2164,16 @@ func ConnectToPostgres(ctx context.Context, logger slog.Logger, driver string, d
}
logger.Debug(ctx, "connected to postgresql", slog.F("version", versionNum))
- err = migrations.Up(sqlDB)
+ if migrate != nil {
+ err = migrate(sqlDB)
+ if err != nil {
+ return nil, xerrors.Errorf("migrate up: %w", err)
+ }
+ }
+
+ err = migrations.EnsureClean(sqlDB)
if err != nil {
- return nil, xerrors.Errorf("migrate up: %w", err)
+ return nil, xerrors.Errorf("migrations in database: %w", err)
}
// The default is 0 but the request will fail with a 500 if the DB
// cannot accept new connections, so we try to limit that here.
@@ -2561,7 +2577,7 @@ func signalNotifyContext(ctx context.Context, inv *serpent.Invocation, sig ...os
return inv.SignalNotifyContext(ctx, sig...)
}
-func getPostgresDB(ctx context.Context, logger slog.Logger, postgresURL string, auth codersdk.PostgresAuth, sqlDriver string) (*sql.DB, string, error) {
+func getAndMigratePostgresDB(ctx context.Context, logger slog.Logger, postgresURL string, auth codersdk.PostgresAuth, sqlDriver string) (*sql.DB, string, error) {
dbURL, err := escapePostgresURLUserInfo(postgresURL)
if err != nil {
return nil, "", xerrors.Errorf("escaping postgres URL: %w", err)
@@ -2574,7 +2590,7 @@ func getPostgresDB(ctx context.Context, logger slog.Logger, postgresURL string,
}
}
- sqlDB, err := ConnectToPostgres(ctx, logger, sqlDriver, dbURL)
+ sqlDB, err := ConnectToPostgres(ctx, logger, sqlDriver, dbURL, migrations.Up)
if err != nil {
return nil, "", xerrors.Errorf("connect to postgres: %w", err)
}
diff --git a/cli/server_createadminuser.go b/cli/server_createadminuser.go
index 7ef95e7e093e6..ed9c7b9bcc921 100644
--- a/cli/server_createadminuser.go
+++ b/cli/server_createadminuser.go
@@ -72,7 +72,7 @@ func (r *RootCmd) newCreateAdminUserCommand() *serpent.Command {
}
}
- sqlDB, err := ConnectToPostgres(ctx, logger, sqlDriver, newUserDBURL)
+ sqlDB, err := ConnectToPostgres(ctx, logger, sqlDriver, newUserDBURL, nil)
if err != nil {
return xerrors.Errorf("connect to postgres: %w", err)
}
diff --git a/cli/server_test.go b/cli/server_test.go
index 9ba963d484548..0dba63e7c2fe3 100644
--- a/cli/server_test.go
+++ b/cli/server_test.go
@@ -38,11 +38,13 @@ import (
"tailscale.com/derp/derphttp"
"tailscale.com/types/key"
+ "cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/v2/cli"
"github.com/coder/coder/v2/cli/clitest"
"github.com/coder/coder/v2/cli/config"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database/dbtestutil"
+ "github.com/coder/coder/v2/coderd/database/migrations"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/telemetry"
"github.com/coder/coder/v2/codersdk"
@@ -1828,20 +1830,51 @@ func TestConnectToPostgres(t *testing.T) {
if !dbtestutil.WillUsePostgres() {
t.Skip("this test does not make sense without postgres")
}
- ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort)
- t.Cleanup(cancel)
- log := testutil.Logger(t)
+ t.Run("Migrate", func(t *testing.T) {
+ t.Parallel()
- dbURL, err := dbtestutil.Open(t)
- require.NoError(t, err)
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort)
+ t.Cleanup(cancel)
- sqlDB, err := cli.ConnectToPostgres(ctx, log, "postgres", dbURL)
- require.NoError(t, err)
- t.Cleanup(func() {
- _ = sqlDB.Close()
+ log := testutil.Logger(t)
+
+ dbURL, err := dbtestutil.Open(t)
+ require.NoError(t, err)
+
+ sqlDB, err := cli.ConnectToPostgres(ctx, log, "postgres", dbURL, migrations.Up)
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ _ = sqlDB.Close()
+ })
+ require.NoError(t, sqlDB.PingContext(ctx))
+ })
+
+ t.Run("NoMigrate", func(t *testing.T) {
+ t.Parallel()
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort)
+ t.Cleanup(cancel)
+
+ log := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true})
+
+ dbURL, err := dbtestutil.Open(t)
+ require.NoError(t, err)
+
+ okDB, err := cli.ConnectToPostgres(ctx, log, "postgres", dbURL, nil)
+ require.NoError(t, err)
+ defer okDB.Close()
+
+ // Set the migration number forward
+ _, err = okDB.Exec(`UPDATE schema_migrations SET version = version + 1`)
+ require.NoError(t, err)
+
+ _, err = cli.ConnectToPostgres(ctx, log, "postgres", dbURL, nil)
+ require.Error(t, err)
+ require.ErrorContains(t, err, "database needs migration")
+
+ require.NoError(t, okDB.PingContext(ctx))
})
- require.NoError(t, sqlDB.PingContext(ctx))
}
func TestServer_InvalidDERP(t *testing.T) {
diff --git a/cli/ssh.go b/cli/ssh.go
index 7df590946fd6b..7a1d5940bfd01 100644
--- a/cli/ssh.go
+++ b/cli/ssh.go
@@ -657,12 +657,19 @@ func getWorkspaceAndAgent(ctx context.Context, inv *serpent.Invocation, client *
// workspaces with the active version.
_, _ = fmt.Fprintf(inv.Stderr, "Workspace was stopped, starting workspace to allow connecting to %q...\n", workspace.Name)
_, err = startWorkspace(inv, client, workspace, workspaceParameterFlags{}, buildFlags{}, WorkspaceStart)
- if cerr, ok := codersdk.AsError(err); ok && cerr.StatusCode() == http.StatusForbidden {
- _, err = startWorkspace(inv, client, workspace, workspaceParameterFlags{}, buildFlags{}, WorkspaceUpdate)
- if err != nil {
- return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, xerrors.Errorf("start workspace with active template version: %w", err)
+ if cerr, ok := codersdk.AsError(err); ok {
+ switch cerr.StatusCode() {
+ case http.StatusConflict:
+ _, _ = fmt.Fprintln(inv.Stderr, "Unable to start the workspace due to conflict, the workspace may be starting, retrying without autostart...")
+ return getWorkspaceAndAgent(ctx, inv, client, false, input)
+
+ case http.StatusForbidden:
+ _, err = startWorkspace(inv, client, workspace, workspaceParameterFlags{}, buildFlags{}, WorkspaceUpdate)
+ if err != nil {
+ return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, xerrors.Errorf("start workspace with active template version: %w", err)
+ }
+ _, _ = fmt.Fprintln(inv.Stdout, "Unable to start the workspace with template version from last build. Your workspace has been updated to the current active template version.")
}
- _, _ = fmt.Fprintln(inv.Stdout, "Unable to start the workspace with template version from last build. Your workspace has been updated to the current active template version.")
} else if err != nil {
return codersdk.Workspace{}, codersdk.WorkspaceAgent{}, xerrors.Errorf("start workspace with current template version: %w", err)
}
diff --git a/cli/ssh_test.go b/cli/ssh_test.go
index 62feaf2b61e95..4fd52971df1cf 100644
--- a/cli/ssh_test.go
+++ b/cli/ssh_test.go
@@ -17,6 +17,7 @@ import (
"os/exec"
"path"
"path/filepath"
+ "regexp"
"runtime"
"strings"
"testing"
@@ -145,6 +146,101 @@ func TestSSH(t *testing.T) {
pty.WriteLine("exit")
<-cmdDone
})
+ t.Run("StartStoppedWorkspaceConflict", func(t *testing.T) {
+ t.Parallel()
+
+ // Intercept builds to synchronize execution of the SSH command.
+ // The purpose here is to make sure all commands try to trigger
+ // a start build of the workspace.
+ isFirstBuild := true
+ buildURL := regexp.MustCompile("/api/v2/workspaces/.*/builds")
+ buildPause := make(chan bool)
+ buildDone := make(chan struct{})
+ buildSyncMW := func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.Method == http.MethodPost && buildURL.MatchString(r.URL.Path) {
+ if !isFirstBuild {
+ t.Log("buildSyncMW: pausing build")
+ if shouldContinue := <-buildPause; !shouldContinue {
+ // We can't force the API to trigger a build conflict (racy) so we fake it.
+ t.Log("buildSyncMW: return conflict")
+ w.WriteHeader(http.StatusConflict)
+ return
+ }
+ t.Log("buildSyncMW: resuming build")
+ defer func() {
+ t.Log("buildSyncMW: sending build done")
+ buildDone <- struct{}{}
+ t.Log("buildSyncMW: done")
+ }()
+ } else {
+ isFirstBuild = false
+ }
+ }
+ next.ServeHTTP(w, r)
+ })
+ }
+
+ authToken := uuid.NewString()
+ ownerClient := coderdtest.New(t, &coderdtest.Options{
+ IncludeProvisionerDaemon: true,
+ APIMiddleware: buildSyncMW,
+ })
+ owner := coderdtest.CreateFirstUser(t, ownerClient)
+ client, _ := coderdtest.CreateAnotherUser(t, ownerClient, owner.OrganizationID, rbac.RoleTemplateAdmin())
+ version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, &echo.Responses{
+ Parse: echo.ParseComplete,
+ ProvisionPlan: echo.PlanComplete,
+ ProvisionApply: echo.ProvisionApplyWithAgent(authToken),
+ })
+ coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
+ template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID)
+ workspace := coderdtest.CreateWorkspace(t, client, template.ID)
+ coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
+ // Stop the workspace
+ workspaceBuild := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop)
+ coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspaceBuild.ID)
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitMedium)
+ defer cancel()
+
+ var ptys []*ptytest.PTY
+ for i := 0; i < 3; i++ {
+ // SSH to the workspace which should autostart it
+ inv, root := clitest.New(t, "ssh", workspace.Name)
+
+ pty := ptytest.New(t).Attach(inv)
+ ptys = append(ptys, pty)
+ clitest.SetupConfig(t, client, root)
+ testutil.Go(t, func() {
+ _ = inv.WithContext(ctx).Run()
+ })
+ }
+
+ for _, pty := range ptys {
+ pty.ExpectMatchContext(ctx, "Workspace was stopped, starting workspace to allow connecting to")
+ }
+
+ // Allow one build to complete.
+ testutil.RequireSendCtx(ctx, t, buildPause, true)
+ testutil.RequireRecvCtx(ctx, t, buildDone)
+
+ // Allow the remaining builds to continue.
+ for i := 0; i < len(ptys)-1; i++ {
+ testutil.RequireSendCtx(ctx, t, buildPause, false)
+ }
+
+ var foundConflict int
+ for _, pty := range ptys {
+ // Either allow the command to start the workspace or fail
+ // due to conflict (race), in which case it retries.
+ match := pty.ExpectRegexMatchContext(ctx, "Waiting for the workspace agent to connect")
+ if strings.Contains(match, "Unable to start the workspace due to conflict, the workspace may be starting, retrying without autostart...") {
+ foundConflict++
+ }
+ }
+ require.Equal(t, 2, foundConflict, "expected 2 conflicts")
+ })
t.Run("RequireActiveVersion", func(t *testing.T) {
t.Parallel()
diff --git a/cli/testdata/coder_reset-password_--help.golden b/cli/testdata/coder_reset-password_--help.golden
index a7d53df12ad90..ccefb412d8fb7 100644
--- a/cli/testdata/coder_reset-password_--help.golden
+++ b/cli/testdata/coder_reset-password_--help.golden
@@ -6,6 +6,9 @@ USAGE:
Directly connect to the database to reset a user's password
OPTIONS:
+ --postgres-connection-auth password|awsiamrds, $CODER_PG_CONNECTION_AUTH (default: password)
+ Type of auth to use when connecting to postgres.
+
--postgres-url string, $CODER_PG_CONNECTION_URL
URL of a PostgreSQL database to connect to.
diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden
index 29b5d1f46cfa5..5db0e9f59698e 100644
--- a/cli/testdata/server-config.yaml.golden
+++ b/cli/testdata/server-config.yaml.golden
@@ -459,8 +459,8 @@ termsOfServiceURL: ""
# (default: ed25519, type: string)
sshKeygenAlgorithm: ed25519
# URL to use for agent troubleshooting when not set in the template.
-# (default: https://coder.com/docs/templates/troubleshooting, type: url)
-agentFallbackTroubleshootingURL: https://coder.com/docs/templates/troubleshooting
+# (default: https://coder.com/docs/admin/templates/troubleshooting, type: url)
+agentFallbackTroubleshootingURL: https://coder.com/docs/admin/templates/troubleshooting
# Disable workspace apps that are not served from subdomains. Path-based apps can
# make requests to the Coder API and pose a security risk when the workspace
# serves malicious JavaScript. This is recommended for security purposes if a
diff --git a/coderd/agentapi/apps_test.go b/coderd/agentapi/apps_test.go
index 41d520efc2fc2..1564c48b04e35 100644
--- a/coderd/agentapi/apps_test.go
+++ b/coderd/agentapi/apps_test.go
@@ -30,6 +30,7 @@ func TestBatchUpdateAppHealths(t *testing.T) {
DisplayName: "code-server 1",
HealthcheckUrl: "http://localhost:3000",
Health: database.WorkspaceAppHealthInitializing,
+ OpenIn: database.WorkspaceAppOpenInSlimWindow,
}
app2 = database.WorkspaceApp{
ID: uuid.New(),
@@ -38,6 +39,7 @@ func TestBatchUpdateAppHealths(t *testing.T) {
DisplayName: "code-server 2",
HealthcheckUrl: "http://localhost:3001",
Health: database.WorkspaceAppHealthHealthy,
+ OpenIn: database.WorkspaceAppOpenInSlimWindow,
}
)
@@ -163,6 +165,7 @@ func TestBatchUpdateAppHealths(t *testing.T) {
AgentID: agent.ID,
Slug: "code-server-3",
DisplayName: "code-server 3",
+ OpenIn: database.WorkspaceAppOpenInSlimWindow,
}
dbM := dbmock.NewMockStore(gomock.NewController(t))
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index 5780443a42de1..a8bfcb2af3b19 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -3170,6 +3170,52 @@ const docTemplate = `{
}
}
},
+ "/organizations/{organization}/settings/idpsync/field-values": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Enterprise"
+ ],
+ "summary": "Get the organization idp sync claim field values",
+ "operationId": "get-the-organization-idp-sync-claim-field-values",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Organization ID",
+ "name": "organization",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "format": "string",
+ "description": "Claim Field",
+ "name": "claimField",
+ "in": "query",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ },
"/organizations/{organization}/settings/idpsync/groups": {
"get": {
"security": [
@@ -3952,6 +3998,52 @@ const docTemplate = `{
}
}
},
+ "/settings/idpsync/field-values": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Enterprise"
+ ],
+ "summary": "Get the idp sync claim field values",
+ "operationId": "get-the-idp-sync-claim-field-values",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Organization ID",
+ "name": "organization",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "format": "string",
+ "description": "Claim Field",
+ "name": "claimField",
+ "in": "query",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ },
"/settings/idpsync/organization": {
"get": {
"security": [
@@ -7696,13 +7788,13 @@ const docTemplate = `{
},
{
"type": "integer",
- "description": "Before Unix timestamp",
+ "description": "Before log id",
"name": "before",
"in": "query"
},
{
"type": "integer",
- "description": "After Unix timestamp",
+ "description": "After log id",
"name": "after",
"in": "query"
},
@@ -15079,6 +15171,9 @@ const docTemplate = `{
"type": "string",
"format": "uuid"
},
+ "open_in": {
+ "$ref": "#/definitions/codersdk.WorkspaceAppOpenIn"
+ },
"sharing_level": {
"enum": [
"owner",
@@ -15124,6 +15219,19 @@ const docTemplate = `{
"WorkspaceAppHealthUnhealthy"
]
},
+ "codersdk.WorkspaceAppOpenIn": {
+ "type": "string",
+ "enum": [
+ "slim-window",
+ "window",
+ "tab"
+ ],
+ "x-enum-varnames": [
+ "WorkspaceAppOpenInSlimWindow",
+ "WorkspaceAppOpenInWindow",
+ "WorkspaceAppOpenInTab"
+ ]
+ },
"codersdk.WorkspaceAppSharingLevel": {
"type": "string",
"enum": [
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index 1ecb6d185e03c..d7c32d8a33a52 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -2788,6 +2788,48 @@
}
}
},
+ "/organizations/{organization}/settings/idpsync/field-values": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": ["application/json"],
+ "tags": ["Enterprise"],
+ "summary": "Get the organization idp sync claim field values",
+ "operationId": "get-the-organization-idp-sync-claim-field-values",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Organization ID",
+ "name": "organization",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "format": "string",
+ "description": "Claim Field",
+ "name": "claimField",
+ "in": "query",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ },
"/organizations/{organization}/settings/idpsync/groups": {
"get": {
"security": [
@@ -3478,6 +3520,48 @@
}
}
},
+ "/settings/idpsync/field-values": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": ["application/json"],
+ "tags": ["Enterprise"],
+ "summary": "Get the idp sync claim field values",
+ "operationId": "get-the-idp-sync-claim-field-values",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Organization ID",
+ "name": "organization",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "format": "string",
+ "description": "Claim Field",
+ "name": "claimField",
+ "in": "query",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ },
"/settings/idpsync/organization": {
"get": {
"security": [
@@ -6798,13 +6882,13 @@
},
{
"type": "integer",
- "description": "Before Unix timestamp",
+ "description": "Before log id",
"name": "before",
"in": "query"
},
{
"type": "integer",
- "description": "After Unix timestamp",
+ "description": "After log id",
"name": "after",
"in": "query"
},
@@ -13728,6 +13812,9 @@
"type": "string",
"format": "uuid"
},
+ "open_in": {
+ "$ref": "#/definitions/codersdk.WorkspaceAppOpenIn"
+ },
"sharing_level": {
"enum": ["owner", "authenticated", "public"],
"allOf": [
@@ -13764,6 +13851,15 @@
"WorkspaceAppHealthUnhealthy"
]
},
+ "codersdk.WorkspaceAppOpenIn": {
+ "type": "string",
+ "enum": ["slim-window", "window", "tab"],
+ "x-enum-varnames": [
+ "WorkspaceAppOpenInSlimWindow",
+ "WorkspaceAppOpenInWindow",
+ "WorkspaceAppOpenInTab"
+ ]
+ },
"codersdk.WorkspaceAppSharingLevel": {
"type": "string",
"enum": ["owner", "authenticated", "public"],
diff --git a/coderd/autobuild/lifecycle_executor_test.go b/coderd/autobuild/lifecycle_executor_test.go
index c700773028d0a..3eb779376cc5c 100644
--- a/coderd/autobuild/lifecycle_executor_test.go
+++ b/coderd/autobuild/lifecycle_executor_test.go
@@ -1256,5 +1256,5 @@ func mustWorkspaceParameters(t *testing.T, client *codersdk.Client, workspaceID
}
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
diff --git a/coderd/autobuild/notify/notifier_test.go b/coderd/autobuild/notify/notifier_test.go
index 5cfdb33e1acd5..4c87a745aba0c 100644
--- a/coderd/autobuild/notify/notifier_test.go
+++ b/coderd/autobuild/notify/notifier_test.go
@@ -122,5 +122,5 @@ func durations(ds ...time.Duration) []time.Duration {
}
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
diff --git a/coderd/coderd_test.go b/coderd/coderd_test.go
index 4d15961a6388e..c94462814999e 100644
--- a/coderd/coderd_test.go
+++ b/coderd/coderd_test.go
@@ -39,7 +39,7 @@ import (
var updateGoldenFiles = flag.Bool("update", false, "Update golden files")
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestBuildInfo(t *testing.T) {
diff --git a/coderd/coderdtest/authorize.go b/coderd/coderdtest/authorize.go
index c10f954140ea5..af52f7fc70f53 100644
--- a/coderd/coderdtest/authorize.go
+++ b/coderd/coderdtest/authorize.go
@@ -358,6 +358,7 @@ func (s *PreparedRecorder) CompileToSQL(ctx context.Context, cfg regosql.Convert
// Meaning 'FakeAuthorizer' by default will never return "unauthorized".
type FakeAuthorizer struct {
ConditionalReturn func(context.Context, rbac.Subject, policy.Action, rbac.Object) error
+ sqlFilter string
}
var _ rbac.Authorizer = (*FakeAuthorizer)(nil)
@@ -370,6 +371,12 @@ func (d *FakeAuthorizer) AlwaysReturn(err error) *FakeAuthorizer {
return d
}
+// OverrideSQLFilter sets the SQL filter that will always be returned by CompileToSQL.
+func (d *FakeAuthorizer) OverrideSQLFilter(filter string) *FakeAuthorizer {
+ d.sqlFilter = filter
+ return d
+}
+
func (d *FakeAuthorizer) Authorize(ctx context.Context, subject rbac.Subject, action policy.Action, object rbac.Object) error {
if d.ConditionalReturn != nil {
return d.ConditionalReturn(ctx, subject, action, object)
@@ -400,10 +407,12 @@ func (f *fakePreparedAuthorizer) Authorize(ctx context.Context, object rbac.Obje
return f.Original.Authorize(ctx, f.Subject, f.Action, object)
}
-// CompileToSQL returns a compiled version of the authorizer that will work for
-// in memory databases. This fake version will not work against a SQL database.
-func (*fakePreparedAuthorizer) CompileToSQL(_ context.Context, _ regosql.ConvertConfig) (string, error) {
- return "not a valid sql string", nil
+func (f *fakePreparedAuthorizer) CompileToSQL(_ context.Context, _ regosql.ConvertConfig) (string, error) {
+ if f.Original.sqlFilter != "" {
+ return f.Original.sqlFilter, nil
+ }
+ // By default, allow all SQL queries.
+ return "TRUE", nil
}
// Random rbac helper funcs
diff --git a/coderd/coderdtest/authorize_test.go b/coderd/coderdtest/authorize_test.go
index 5cdcd26869cf3..75f9a5d843481 100644
--- a/coderd/coderdtest/authorize_test.go
+++ b/coderd/coderdtest/authorize_test.go
@@ -44,7 +44,7 @@ func TestAuthzRecorder(t *testing.T) {
require.NoError(t, rec.AllAsserted(), "all assertions should have been made")
})
- t.Run("Authorize&Prepared", func(t *testing.T) {
+ t.Run("Authorize_Prepared", func(t *testing.T) {
t.Parallel()
rec := &coderdtest.RecordingAuthorizer{
diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go
index bd1ed740a7ce7..aa096707b8fb7 100644
--- a/coderd/coderdtest/coderdtest.go
+++ b/coderd/coderdtest/coderdtest.go
@@ -33,6 +33,7 @@ import (
"cloud.google.com/go/compute/metadata"
"github.com/fullsailor/pkcs7"
+ "github.com/go-chi/chi/v5"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
"github.com/moby/moby/pkg/namesgenerator"
@@ -146,6 +147,11 @@ type Options struct {
Database database.Store
Pubsub pubsub.Pubsub
+ // APIMiddleware inserts middleware before api.RootHandler, this can be
+ // useful in certain tests where you want to intercept requests before
+ // passing them on to the API, e.g. for synchronization of execution.
+ APIMiddleware func(http.Handler) http.Handler
+
ConfigSSH codersdk.SSHConfigResponse
SwaggerEndpoint bool
@@ -555,7 +561,14 @@ func NewWithAPI(t testing.TB, options *Options) (*codersdk.Client, io.Closer, *c
setHandler, cancelFunc, serverURL, newOptions := NewOptions(t, options)
// We set the handler after server creation for the access URL.
coderAPI := coderd.New(newOptions)
- setHandler(coderAPI.RootHandler)
+ rootHandler := coderAPI.RootHandler
+ if options.APIMiddleware != nil {
+ r := chi.NewRouter()
+ r.Use(options.APIMiddleware)
+ r.Mount("/", rootHandler)
+ rootHandler = r
+ }
+ setHandler(rootHandler)
var provisionerCloser io.Closer = nopcloser{}
if options.IncludeProvisionerDaemon {
provisionerCloser = NewTaggedProvisionerDaemon(t, coderAPI, "test", options.ProvisionerDaemonTags)
diff --git a/coderd/coderdtest/coderdtest_test.go b/coderd/coderdtest/coderdtest_test.go
index d4dfae6529e8b..8bd4898fe2f21 100644
--- a/coderd/coderdtest/coderdtest_test.go
+++ b/coderd/coderdtest/coderdtest_test.go
@@ -6,10 +6,11 @@ import (
"go.uber.org/goleak"
"github.com/coder/coder/v2/coderd/coderdtest"
+ "github.com/coder/coder/v2/testutil"
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestNew(t *testing.T) {
diff --git a/coderd/coderdtest/oidctest/idp.go b/coderd/coderdtest/oidctest/idp.go
index 90c9c386628f1..d6c7e6259f760 100644
--- a/coderd/coderdtest/oidctest/idp.go
+++ b/coderd/coderdtest/oidctest/idp.go
@@ -25,7 +25,7 @@ import (
"github.com/coreos/go-oidc/v3/oidc"
"github.com/go-chi/chi/v5"
- "github.com/go-jose/go-jose/v3"
+ "github.com/go-jose/go-jose/v4"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
"github.com/prometheus/client_golang/prometheus"
diff --git a/coderd/cryptokeys/cache_test.go b/coderd/cryptokeys/cache_test.go
index 0f732e3f171bc..8039d27233b59 100644
--- a/coderd/cryptokeys/cache_test.go
+++ b/coderd/cryptokeys/cache_test.go
@@ -18,7 +18,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestCryptoKeyCache(t *testing.T) {
diff --git a/coderd/database/awsiamrds/awsiamrds_test.go b/coderd/database/awsiamrds/awsiamrds_test.go
index 844b85b119850..d52da4aab7bfe 100644
--- a/coderd/database/awsiamrds/awsiamrds_test.go
+++ b/coderd/database/awsiamrds/awsiamrds_test.go
@@ -9,6 +9,7 @@ import (
"github.com/coder/coder/v2/cli"
"github.com/coder/coder/v2/coderd/database/awsiamrds"
+ "github.com/coder/coder/v2/coderd/database/migrations"
"github.com/coder/coder/v2/coderd/database/pubsub"
"github.com/coder/coder/v2/testutil"
)
@@ -32,7 +33,7 @@ func TestDriver(t *testing.T) {
sqlDriver, err := awsiamrds.Register(ctx, "postgres")
require.NoError(t, err)
- db, err := cli.ConnectToPostgres(ctx, testutil.Logger(t), sqlDriver, url)
+ db, err := cli.ConnectToPostgres(ctx, testutil.Logger(t), sqlDriver, url, migrations.Up)
require.NoError(t, err)
defer func() {
_ = db.Close()
diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go
index 19f0d7201106d..aabebcd14b7ac 100644
--- a/coderd/database/db2sdk/db2sdk.go
+++ b/coderd/database/db2sdk/db2sdk.go
@@ -518,6 +518,7 @@ func Apps(dbApps []database.WorkspaceApp, agent database.WorkspaceAgent, ownerNa
},
Health: codersdk.WorkspaceAppHealth(dbApp.Health),
Hidden: dbApp.Hidden,
+ OpenIn: codersdk.WorkspaceAppOpenIn(dbApp.OpenIn),
})
}
return apps
diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go
index f64dbcc166591..0a35667ed0178 100644
--- a/coderd/database/dbauthz/dbauthz.go
+++ b/coderd/database/dbauthz/dbauthz.go
@@ -8,6 +8,7 @@ import (
"fmt"
"strings"
"sync/atomic"
+ "testing"
"time"
"github.com/google/uuid"
@@ -1366,6 +1367,13 @@ func (q *querier) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context,
return q.db.DeleteWorkspaceAgentPortSharesByTemplate(ctx, templateID)
}
+func (q *querier) DisableForeignKeysAndTriggers(ctx context.Context) error {
+ if !testing.Testing() {
+ return xerrors.Errorf("DisableForeignKeysAndTriggers is only allowed in tests")
+ }
+ return q.db.DisableForeignKeysAndTriggers(ctx)
+}
+
func (q *querier) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error {
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceNotificationMessage); err != nil {
return err
diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go
index 961f5d535b280..93e9a4318d1ed 100644
--- a/coderd/database/dbauthz/dbauthz_test.go
+++ b/coderd/database/dbauthz/dbauthz_test.go
@@ -4,18 +4,22 @@ import (
"context"
"database/sql"
"encoding/json"
+ "fmt"
+ "net"
"reflect"
"strings"
"testing"
"time"
"github.com/google/uuid"
+ "github.com/sqlc-dev/pqtype"
"github.com/stretchr/testify/require"
"golang.org/x/xerrors"
"cdr.dev/slog"
"github.com/coder/coder/v2/coderd/database/db2sdk"
+ "github.com/coder/coder/v2/coderd/database/dbmem"
"github.com/coder/coder/v2/coderd/notifications"
"github.com/coder/coder/v2/coderd/rbac/policy"
"github.com/coder/coder/v2/codersdk"
@@ -24,7 +28,7 @@ import (
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/database/dbgen"
- "github.com/coder/coder/v2/coderd/database/dbmem"
+ "github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/util/slice"
@@ -70,7 +74,8 @@ func TestAsNoActor(t *testing.T) {
func TestPing(t *testing.T) {
t.Parallel()
- q := dbauthz.New(dbmem.New(), &coderdtest.RecordingAuthorizer{}, slog.Make(), coderdtest.AccessControlStorePointer())
+ db, _ := dbtestutil.NewDB(t)
+ q := dbauthz.New(db, &coderdtest.RecordingAuthorizer{}, slog.Make(), coderdtest.AccessControlStorePointer())
_, err := q.Ping(context.Background())
require.NoError(t, err, "must not error")
}
@@ -79,7 +84,7 @@ func TestPing(t *testing.T) {
func TestInTX(t *testing.T) {
t.Parallel()
- db := dbmem.New()
+ db, _ := dbtestutil.NewDB(t)
q := dbauthz.New(db, &coderdtest.RecordingAuthorizer{
Wrapped: (&coderdtest.FakeAuthorizer{}).AlwaysReturn(xerrors.New("custom error")),
}, slog.Make(), coderdtest.AccessControlStorePointer())
@@ -89,8 +94,17 @@ func TestInTX(t *testing.T) {
Groups: []string{},
Scope: rbac.ScopeAll,
}
-
- w := dbgen.Workspace(t, db, database.WorkspaceTable{})
+ u := dbgen.User(t, db, database.User{})
+ o := dbgen.Organization(t, db, database.Organization{})
+ tpl := dbgen.Template(t, db, database.Template{
+ CreatedBy: u.ID,
+ OrganizationID: o.ID,
+ })
+ w := dbgen.Workspace(t, db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ })
ctx := dbauthz.As(context.Background(), actor)
err := q.InTx(func(tx database.Store) error {
// The inner tx should use the parent's authz
@@ -107,15 +121,24 @@ func TestNew(t *testing.T) {
t.Parallel()
var (
- db = dbmem.New()
- exp = dbgen.Workspace(t, db, database.WorkspaceTable{})
- rec = &coderdtest.RecordingAuthorizer{
+ db, _ = dbtestutil.NewDB(t)
+ rec = &coderdtest.RecordingAuthorizer{
Wrapped: &coderdtest.FakeAuthorizer{},
}
subj = rbac.Subject{}
ctx = dbauthz.As(context.Background(), rbac.Subject{})
)
-
+ u := dbgen.User(t, db, database.User{})
+ org := dbgen.Organization(t, db, database.Organization{})
+ tpl := dbgen.Template(t, db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ exp := dbgen.Workspace(t, db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
// Double wrap should not cause an actual double wrap. So only 1 rbac call
// should be made.
az := dbauthz.New(db, rec, slog.Make(), coderdtest.AccessControlStorePointer())
@@ -134,7 +157,8 @@ func TestNew(t *testing.T) {
// as only the first db call will be made. But it is better than nothing.
func TestDBAuthzRecursive(t *testing.T) {
t.Parallel()
- q := dbauthz.New(dbmem.New(), &coderdtest.RecordingAuthorizer{
+ db, _ := dbtestutil.NewDB(t)
+ q := dbauthz.New(db, &coderdtest.RecordingAuthorizer{
Wrapped: &coderdtest.FakeAuthorizer{},
}, slog.Make(), coderdtest.AccessControlStorePointer())
actor := rbac.Subject{
@@ -173,16 +197,29 @@ func must[T any](value T, err error) T {
return value
}
+func defaultIPAddress() pqtype.Inet {
+ return pqtype.Inet{
+ IPNet: net.IPNet{
+ IP: net.IPv4(127, 0, 0, 1),
+ Mask: net.IPv4Mask(255, 255, 255, 255),
+ },
+ Valid: true,
+ }
+}
+
func (s *MethodTestSuite) TestAPIKey() {
s.Run("DeleteAPIKeyByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key, _ := dbgen.APIKey(s.T(), db, database.APIKey{})
check.Args(key.ID).Asserts(key, policy.ActionDelete).Returns()
}))
s.Run("GetAPIKeyByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key, _ := dbgen.APIKey(s.T(), db, database.APIKey{})
check.Args(key.ID).Asserts(key, policy.ActionRead).Returns(key)
}))
s.Run("GetAPIKeyByName", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key, _ := dbgen.APIKey(s.T(), db, database.APIKey{
TokenName: "marge-cat",
LoginType: database.LoginTypeToken,
@@ -193,6 +230,7 @@ func (s *MethodTestSuite) TestAPIKey() {
}).Asserts(key, policy.ActionRead).Returns(key)
}))
s.Run("GetAPIKeysByLoginType", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
a, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword})
b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword})
_, _ = dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypeGithub})
@@ -201,18 +239,19 @@ func (s *MethodTestSuite) TestAPIKey() {
Returns(slice.New(a, b))
}))
s.Run("GetAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) {
- idAB := uuid.New()
- idC := uuid.New()
+ u1 := dbgen.User(s.T(), db, database.User{})
+ u2 := dbgen.User(s.T(), db, database.User{})
- keyA, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: idAB, LoginType: database.LoginTypeToken})
- keyB, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: idAB, LoginType: database.LoginTypeToken})
- _, _ = dbgen.APIKey(s.T(), db, database.APIKey{UserID: idC, LoginType: database.LoginTypeToken})
+ keyA, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-a"})
+ keyB, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-b"})
+ _, _ = dbgen.APIKey(s.T(), db, database.APIKey{UserID: u2.ID, LoginType: database.LoginTypeToken})
- check.Args(database.GetAPIKeysByUserIDParams{LoginType: database.LoginTypeToken, UserID: idAB}).
+ check.Args(database.GetAPIKeysByUserIDParams{LoginType: database.LoginTypeToken, UserID: u1.ID}).
Asserts(keyA, policy.ActionRead, keyB, policy.ActionRead).
Returns(slice.New(keyA, keyB))
}))
s.Run("GetAPIKeysLastUsedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
a, _ := dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(time.Hour)})
b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(time.Hour)})
_, _ = dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(-time.Hour)})
@@ -222,19 +261,26 @@ func (s *MethodTestSuite) TestAPIKey() {
}))
s.Run("InsertAPIKey", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
+
check.Args(database.InsertAPIKeyParams{
UserID: u.ID,
LoginType: database.LoginTypePassword,
Scope: database.APIKeyScopeAll,
+ IPAddress: defaultIPAddress(),
}).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionCreate)
}))
s.Run("UpdateAPIKeyByID", s.Subtest(func(db database.Store, check *expects) {
- a, _ := dbgen.APIKey(s.T(), db, database.APIKey{})
+ u := dbgen.User(s.T(), db, database.User{})
+ a, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u.ID, IPAddress: defaultIPAddress()})
check.Args(database.UpdateAPIKeyByIDParams{
- ID: a.ID,
+ ID: a.ID,
+ IPAddress: defaultIPAddress(),
+ LastUsed: time.Now(),
+ ExpiresAt: time.Now().Add(time.Hour),
}).Asserts(a, policy.ActionUpdate).Returns()
}))
s.Run("DeleteApplicationConnectAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
a, _ := dbgen.APIKey(s.T(), db, database.APIKey{
Scope: database.APIKeyScopeApplicationConnect,
})
@@ -261,8 +307,10 @@ func (s *MethodTestSuite) TestAPIKey() {
func (s *MethodTestSuite) TestAuditLogs() {
s.Run("InsertAuditLog", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.InsertAuditLogParams{
- ResourceType: database.ResourceTypeOrganization,
- Action: database.AuditActionCreate,
+ ResourceType: database.ResourceTypeOrganization,
+ Action: database.AuditActionCreate,
+ Diff: json.RawMessage("{}"),
+ AdditionalFields: json.RawMessage("{}"),
}).Asserts(rbac.ResourceAuditLog, policy.ActionCreate)
}))
s.Run("GetAuditLogsOffset", s.Subtest(func(db database.Store, check *expects) {
@@ -270,9 +318,10 @@ func (s *MethodTestSuite) TestAuditLogs() {
_ = dbgen.AuditLog(s.T(), db, database.AuditLog{})
check.Args(database.GetAuditLogsOffsetParams{
LimitOpt: 10,
- }).Asserts(rbac.ResourceAuditLog, policy.ActionRead)
+ }).Asserts(rbac.ResourceAuditLog, policy.ActionRead).WithNotAuthorized("nil")
}))
s.Run("GetAuthorizedAuditLogsOffset", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.AuditLog(s.T(), db, database.AuditLog{})
_ = dbgen.AuditLog(s.T(), db, database.AuditLog{})
check.Args(database.GetAuditLogsOffsetParams{
@@ -303,10 +352,12 @@ func (s *MethodTestSuite) TestFile() {
func (s *MethodTestSuite) TestGroup() {
s.Run("DeleteGroupByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(g.ID).Asserts(g, policy.ActionDelete).Returns()
}))
s.Run("DeleteGroupMemberFromGroup", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
u := dbgen.User(s.T(), db, database.User{})
m := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{
@@ -319,10 +370,12 @@ func (s *MethodTestSuite) TestGroup() {
}).Asserts(g, policy.ActionUpdate).Returns()
}))
s.Run("GetGroupByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(g.ID).Asserts(g, policy.ActionRead).Returns(g)
}))
s.Run("GetGroupByOrgAndName", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(database.GetGroupByOrgAndNameParams{
OrganizationID: g.OrganizationID,
@@ -330,28 +383,33 @@ func (s *MethodTestSuite) TestGroup() {
}).Asserts(g, policy.ActionRead).Returns(g)
}))
s.Run("GetGroupMembersByGroupID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
u := dbgen.User(s.T(), db, database.User{})
gm := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID})
check.Args(g.ID).Asserts(gm, policy.ActionRead)
}))
s.Run("GetGroupMembersCountByGroupID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(g.ID).Asserts(g, policy.ActionRead)
}))
s.Run("GetGroupMembers", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
u := dbgen.User(s.T(), db, database.User{})
dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID})
check.Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("System/GetGroups", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.Group(s.T(), db, database.Group{})
check.Args(database.GetGroupsParams{}).
Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetGroups", s.Subtest(func(db database.Store, check *expects) {
- g := dbgen.Group(s.T(), db, database.Group{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ g := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
u := dbgen.User(s.T(), db, database.User{})
gm := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID})
check.Args(database.GetGroupsParams{
@@ -373,6 +431,7 @@ func (s *MethodTestSuite) TestGroup() {
}).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate)
}))
s.Run("InsertGroupMember", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(database.InsertGroupMemberParams{
UserID: uuid.New(),
@@ -384,7 +443,6 @@ func (s *MethodTestSuite) TestGroup() {
u1 := dbgen.User(s.T(), db, database.User{})
g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
- _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g1.ID, UserID: u1.ID})
check.Args(database.InsertUserGroupsByNameParams{
OrganizationID: o.ID,
UserID: u1.ID,
@@ -396,11 +454,16 @@ func (s *MethodTestSuite) TestGroup() {
u1 := dbgen.User(s.T(), db, database.User{})
g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
+ g3 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID})
_ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g1.ID, UserID: u1.ID})
+ returns := slice.New(g2.ID, g3.ID)
+ if !dbtestutil.WillUsePostgres() {
+ returns = slice.New(g1.ID, g2.ID, g3.ID)
+ }
check.Args(database.InsertUserGroupsByIDParams{
UserID: u1.ID,
- GroupIds: slice.New(g1.ID, g2.ID),
- }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(slice.New(g1.ID, g2.ID))
+ GroupIds: slice.New(g1.ID, g2.ID, g3.ID),
+ }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(returns)
}))
s.Run("RemoveUserFromAllGroups", s.Subtest(func(db database.Store, check *expects) {
o := dbgen.Organization(s.T(), db, database.Organization{})
@@ -424,6 +487,7 @@ func (s *MethodTestSuite) TestGroup() {
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(slice.New(g1.ID, g2.ID))
}))
s.Run("UpdateGroupByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
g := dbgen.Group(s.T(), db, database.Group{})
check.Args(database.UpdateGroupByIDParams{
ID: g.ID,
@@ -433,6 +497,7 @@ func (s *MethodTestSuite) TestGroup() {
func (s *MethodTestSuite) TestProvisionerJob() {
s.Run("ArchiveUnusedTemplateVersions", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeTemplateVersionImport,
Error: sql.NullString{
@@ -453,6 +518,7 @@ func (s *MethodTestSuite) TestProvisionerJob() {
}).Asserts(v.RBACObject(tpl), policy.ActionUpdate)
}))
s.Run("UnarchiveTemplateVersion", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeTemplateVersionImport,
})
@@ -468,14 +534,35 @@ func (s *MethodTestSuite) TestProvisionerJob() {
}).Asserts(v.RBACObject(tpl), policy.ActionUpdate)
}))
s.Run("Build/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: o.ID,
+ TemplateID: tpl.ID,
+ })
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ JobID: j.ID,
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(j.ID).Asserts(w, policy.ActionRead).Returns(j)
}))
s.Run("TemplateVersion/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeTemplateVersionImport,
})
@@ -487,6 +574,7 @@ func (s *MethodTestSuite) TestProvisionerJob() {
check.Args(j.ID).Asserts(v.RBACObject(tpl), policy.ActionRead).Returns(j)
}))
s.Run("TemplateVersionDryRun/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
tpl := dbgen.Template(s.T(), db, database.Template{})
v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
@@ -500,24 +588,59 @@ func (s *MethodTestSuite) TestProvisionerJob() {
check.Args(j.ID).Asserts(v.RBACObject(tpl), policy.ActionRead).Returns(j)
}))
s.Run("Build/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{AllowUserCancelWorkspaceJobs: true})
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{TemplateID: tpl.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ AllowUserCancelWorkspaceJobs: true,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID})
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("BuildFalseCancel/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{AllowUserCancelWorkspaceJobs: false})
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{TemplateID: tpl.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ AllowUserCancelWorkspaceJobs: false,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{TemplateID: tpl.ID, OrganizationID: o.ID, OwnerID: u.ID})
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID})
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("TemplateVersion/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeTemplateVersionImport,
})
@@ -530,6 +653,7 @@ func (s *MethodTestSuite) TestProvisionerJob() {
Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns()
}))
s.Run("TemplateVersionNoTemplate/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeTemplateVersionImport,
})
@@ -541,6 +665,7 @@ func (s *MethodTestSuite) TestProvisionerJob() {
Asserts(v.RBACObjectNoTemplate(), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns()
}))
s.Run("TemplateVersionDryRun/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
tpl := dbgen.Template(s.T(), db, database.Template{})
v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
@@ -560,11 +685,30 @@ func (s *MethodTestSuite) TestProvisionerJob() {
check.Args([]uuid.UUID{a.ID, b.ID}).Asserts().Returns(slice.New(a, b))
}))
s.Run("GetProvisionerLogsAfterID", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ TemplateID: tpl.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID})
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.GetProvisionerLogsAfterIDParams{
JobID: j.ID,
}).Asserts(w, policy.ActionRead).Returns([]database.ProvisionerJobLog{})
@@ -605,7 +749,8 @@ func (s *MethodTestSuite) TestLicense() {
check.Args(l.ID).Asserts(l, policy.ActionDelete)
}))
s.Run("GetDeploymentID", s.Subtest(func(db database.Store, check *expects) {
- check.Args().Asserts().Returns("")
+ db.InsertDeploymentID(context.Background(), "value")
+ check.Args().Asserts().Returns("value")
}))
s.Run("GetDefaultProxyConfig", s.Subtest(func(db database.Store, check *expects) {
check.Args().Asserts().Returns(database.GetDefaultProxyConfigRow{
@@ -675,10 +820,12 @@ func (s *MethodTestSuite) TestOrganization() {
s.Run("GetOrganizationIDsByMemberIDs", s.Subtest(func(db database.Store, check *expects) {
oa := dbgen.Organization(s.T(), db, database.Organization{})
ob := dbgen.Organization(s.T(), db, database.Organization{})
- ma := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: oa.ID})
- mb := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: ob.ID})
+ ua := dbgen.User(s.T(), db, database.User{})
+ ub := dbgen.User(s.T(), db, database.User{})
+ ma := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: oa.ID, UserID: ua.ID})
+ mb := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: ob.ID, UserID: ub.ID})
check.Args([]uuid.UUID{ma.UserID, mb.UserID}).
- Asserts(rbac.ResourceUserObject(ma.UserID), policy.ActionRead, rbac.ResourceUserObject(mb.UserID), policy.ActionRead)
+ Asserts(rbac.ResourceUserObject(ma.UserID), policy.ActionRead, rbac.ResourceUserObject(mb.UserID), policy.ActionRead).OutOfOrder()
}))
s.Run("GetOrganizations", s.Subtest(func(db database.Store, check *expects) {
def, _ := db.GetDefaultOrganization(context.Background())
@@ -717,6 +864,11 @@ func (s *MethodTestSuite) TestOrganization() {
u := dbgen.User(s.T(), db, database.User{})
member := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID, OrganizationID: o.ID})
+ cancelledErr := "fetch object: context canceled"
+ if !dbtestutil.WillUsePostgres() {
+ cancelledErr = sql.ErrNoRows.Error()
+ }
+
check.Args(database.DeleteOrganizationMemberParams{
OrganizationID: o.ID,
UserID: u.ID,
@@ -724,10 +876,9 @@ func (s *MethodTestSuite) TestOrganization() {
// Reads the org member before it tries to delete it
member, policy.ActionRead,
member, policy.ActionDelete).
- // SQL Filter returns a 404
WithNotAuthorized("no rows").
- WithCancelled("no rows").
- Errors(sql.ErrNoRows)
+ WithCancelled(cancelledErr).
+ ErrorsWithInMemDB(sql.ErrNoRows)
}))
s.Run("UpdateOrganization", s.Subtest(func(db database.Store, check *expects) {
o := dbgen.Organization(s.T(), db, database.Organization{
@@ -773,13 +924,18 @@ func (s *MethodTestSuite) TestOrganization() {
out := mem
out.Roles = []string{}
+ cancelledErr := "fetch object: context canceled"
+ if !dbtestutil.WillUsePostgres() {
+ cancelledErr = sql.ErrNoRows.Error()
+ }
+
check.Args(database.UpdateMemberRolesParams{
GrantedRoles: []string{},
UserID: u.ID,
OrgID: o.ID,
}).
WithNotAuthorized(sql.ErrNoRows.Error()).
- WithCancelled(sql.ErrNoRows.Error()).
+ WithCancelled(cancelledErr).
Asserts(
mem, policy.ActionRead,
rbac.ResourceAssignOrgRole.InOrg(o.ID), policy.ActionAssign, // org-mem
@@ -832,10 +988,12 @@ func (s *MethodTestSuite) TestTemplate() {
s.Run("GetPreviousTemplateVersion", s.Subtest(func(db database.Store, check *expects) {
tvid := uuid.New()
now := time.Now()
+ u := dbgen.User(s.T(), db, database.User{})
o1 := dbgen.Organization(s.T(), db, database.Organization{})
t1 := dbgen.Template(s.T(), db, database.Template{
OrganizationID: o1.ID,
ActiveVersionID: tvid,
+ CreatedBy: u.ID,
})
_ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
CreatedAt: now.Add(-time.Hour),
@@ -843,12 +1001,14 @@ func (s *MethodTestSuite) TestTemplate() {
Name: t1.Name,
OrganizationID: o1.ID,
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
+ CreatedBy: u.ID,
})
b := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
CreatedAt: now.Add(-2 * time.Hour),
- Name: t1.Name,
+ Name: t1.Name + "b",
OrganizationID: o1.ID,
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
+ CreatedBy: u.ID,
})
check.Args(database.GetPreviousTemplateVersionParams{
Name: t1.Name,
@@ -857,10 +1017,12 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionRead).Returns(b)
}))
s.Run("GetTemplateByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(t1.ID).Asserts(t1, policy.ActionRead).Returns(t1)
}))
s.Run("GetTemplateByOrganizationAndName", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
o1 := dbgen.Organization(s.T(), db, database.Organization{})
t1 := dbgen.Template(s.T(), db, database.Template{
OrganizationID: o1.ID,
@@ -871,6 +1033,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionRead).Returns(t1)
}))
s.Run("GetTemplateVersionByJobID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -878,6 +1041,7 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(tv.JobID).Asserts(t1, policy.ActionRead).Returns(tv)
}))
s.Run("GetTemplateVersionByTemplateIDAndName", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -888,6 +1052,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionRead).Returns(tv)
}))
s.Run("GetTemplateVersionParameters", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -895,6 +1060,7 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionParameter{})
}))
s.Run("GetTemplateVersionVariables", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -905,6 +1071,7 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionVariable{tvv1})
}))
s.Run("GetTemplateVersionWorkspaceTags", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -915,14 +1082,17 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionWorkspaceTag{wt1})
}))
s.Run("GetTemplateGroupRoles", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(t1.ID).Asserts(t1, policy.ActionUpdate)
}))
s.Run("GetTemplateUserRoles", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(t1.ID).Asserts(t1, policy.ActionUpdate)
}))
s.Run("GetTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -930,6 +1100,7 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns(tv)
}))
s.Run("GetTemplateVersionsByTemplateID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
a := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -943,6 +1114,7 @@ func (s *MethodTestSuite) TestTemplate() {
Returns(slice.New(a, b))
}))
s.Run("GetTemplateVersionsCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
now := time.Now()
t1 := dbgen.Template(s.T(), db, database.Template{})
_ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
@@ -956,12 +1128,18 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(now.Add(-time.Hour)).Asserts(rbac.ResourceTemplate.All(), policy.ActionRead)
}))
s.Run("GetTemplatesWithFilter", s.Subtest(func(db database.Store, check *expects) {
- a := dbgen.Template(s.T(), db, database.Template{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ u := dbgen.User(s.T(), db, database.User{})
+ a := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
// No asserts because SQLFilter.
check.Args(database.GetTemplatesWithFilterParams{}).
Asserts().Returns(slice.New(a))
}))
s.Run("GetAuthorizedTemplates", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
a := dbgen.Template(s.T(), db, database.Template{})
// No asserts because SQLFilter.
check.Args(database.GetTemplatesWithFilterParams{}, emptyPreparedAuthorized{}).
@@ -969,6 +1147,7 @@ func (s *MethodTestSuite) TestTemplate() {
Returns(slice.New(a))
}))
s.Run("InsertTemplate", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
orgID := uuid.New()
check.Args(database.InsertTemplateParams{
Provisioner: "echo",
@@ -977,6 +1156,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate)
}))
s.Run("InsertTemplateVersion", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.InsertTemplateVersionParams{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -984,46 +1164,54 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionRead, t1, policy.ActionCreate)
}))
s.Run("SoftDeleteTemplateByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(t1.ID).Asserts(t1, policy.ActionDelete)
}))
s.Run("UpdateTemplateACLByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateACLByIDParams{
ID: t1.ID,
}).Asserts(t1, policy.ActionCreate)
}))
s.Run("UpdateTemplateAccessControlByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateAccessControlByIDParams{
ID: t1.ID,
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateTemplateScheduleByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateScheduleByIDParams{
ID: t1.ID,
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateTemplateWorkspacesLastUsedAt", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateWorkspacesLastUsedAtParams{
TemplateID: t1.ID,
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateWorkspacesDormantDeletingAtByTemplateID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams{
TemplateID: t1.ID,
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateWorkspacesTTLByTemplateID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateWorkspacesTTLByTemplateIDParams{
TemplateID: t1.ID,
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateTemplateActiveVersionByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{
ActiveVersionID: uuid.New(),
})
@@ -1037,6 +1225,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionUpdate).Returns()
}))
s.Run("UpdateTemplateDeletedByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateDeletedByIDParams{
ID: t1.ID,
@@ -1044,6 +1233,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionDelete).Returns()
}))
s.Run("UpdateTemplateMetaByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
check.Args(database.UpdateTemplateMetaByIDParams{
ID: t1.ID,
@@ -1051,6 +1241,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
@@ -1063,6 +1254,7 @@ func (s *MethodTestSuite) TestTemplate() {
}).Asserts(t1, policy.ActionUpdate)
}))
s.Run("UpdateTemplateVersionDescriptionByJobID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
jobID := uuid.New()
t1 := dbgen.Template(s.T(), db, database.Template{})
_ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
@@ -1076,13 +1268,21 @@ func (s *MethodTestSuite) TestTemplate() {
}))
s.Run("UpdateTemplateVersionExternalAuthProvidersByJobID", s.Subtest(func(db database.Store, check *expects) {
jobID := uuid.New()
- t1 := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ t1 := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
_ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
- TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
- JobID: jobID,
+ TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true},
+ CreatedBy: u.ID,
+ OrganizationID: o.ID,
+ JobID: jobID,
})
check.Args(database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams{
- JobID: jobID,
+ JobID: jobID,
+ ExternalAuthProviders: json.RawMessage("{}"),
}).Asserts(t1, policy.ActionUpdate).Returns()
}))
s.Run("GetTemplateInsights", s.Subtest(func(db database.Store, check *expects) {
@@ -1092,13 +1292,19 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
}))
s.Run("GetUserActivityInsights", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Errors(sql.ErrNoRows)
+ check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).
+ ErrorsWithInMemDB(sql.ErrNoRows).
+ Returns([]database.GetUserActivityInsightsRow{})
}))
s.Run("GetTemplateParameterInsights", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
}))
s.Run("GetTemplateInsightsByInterval", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.GetTemplateInsightsByIntervalParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
+ check.Args(database.GetTemplateInsightsByIntervalParams{
+ IntervalDays: 7,
+ StartTime: dbtime.Now().Add(-time.Hour * 24 * 7),
+ EndTime: dbtime.Now(),
+ }).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
}))
s.Run("GetTemplateInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
@@ -1110,7 +1316,9 @@ func (s *MethodTestSuite) TestTemplate() {
check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights)
}))
s.Run("GetTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Errors(sql.ErrNoRows)
+ check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).
+ ErrorsWithInMemDB(sql.ErrNoRows).
+ Returns([]database.TemplateUsageStat{})
}))
s.Run("UpsertTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) {
check.Asserts(rbac.ResourceSystem, policy.ActionUpdate)
@@ -1119,6 +1327,7 @@ func (s *MethodTestSuite) TestTemplate() {
func (s *MethodTestSuite) TestUser() {
s.Run("GetAuthorizedUsers", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
dbgen.User(s.T(), db, database.User{})
// No asserts because SQLFilter.
check.Args(database.GetUsersParams{}, emptyPreparedAuthorized{}).
@@ -1161,6 +1370,7 @@ func (s *MethodTestSuite) TestUser() {
Returns(slice.New(a, b))
}))
s.Run("GetUsers", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
dbgen.User(s.T(), db, database.User{Username: "GetUsers-a-user"})
dbgen.User(s.T(), db, database.User{Username: "GetUsers-b-user"})
check.Args(database.GetUsersParams{}).
@@ -1171,6 +1381,7 @@ func (s *MethodTestSuite) TestUser() {
check.Args(database.InsertUserParams{
ID: uuid.New(),
LoginType: database.LoginTypePassword,
+ RBACRoles: []string{},
}).Asserts(rbac.ResourceAssignRole, policy.ActionAssign, rbac.ResourceUser, policy.ActionCreate)
}))
s.Run("InsertUserLink", s.Subtest(func(db database.Store, check *expects) {
@@ -1199,7 +1410,9 @@ func (s *MethodTestSuite) TestUser() {
s.Run("UpdateUserHashedOneTimePasscode", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
check.Args(database.UpdateUserHashedOneTimePasscodeParams{
- ID: u.ID,
+ ID: u.ID,
+ HashedOneTimePasscode: []byte{},
+ OneTimePasscodeExpiresAt: sql.NullTime{Time: u.CreatedAt, Valid: true},
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns()
}))
s.Run("UpdateUserQuietHoursSchedule", s.Subtest(func(db database.Store, check *expects) {
@@ -1254,10 +1467,12 @@ func (s *MethodTestSuite) TestUser() {
}).Asserts(u, policy.ActionUpdate).Returns(u)
}))
s.Run("DeleteGitSSHKey", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{})
check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionUpdatePersonal).Returns()
}))
s.Run("GetGitSSHKey", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{})
check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionReadPersonal).Returns(key)
}))
@@ -1268,6 +1483,7 @@ func (s *MethodTestSuite) TestUser() {
}).Asserts(u, policy.ActionUpdatePersonal)
}))
s.Run("UpdateGitSSHKey", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{})
check.Args(database.UpdateGitSSHKeyParams{
UserID: key.UserID,
@@ -1310,6 +1526,7 @@ func (s *MethodTestSuite) TestUser() {
}).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal).Returns(link)
}))
s.Run("UpdateUserLink", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
link := dbgen.UserLink(s.T(), db, database.UserLink{})
check.Args(database.UpdateUserLinkParams{
OAuthAccessToken: link.OAuthAccessToken,
@@ -1367,6 +1584,7 @@ func (s *MethodTestSuite) TestUser() {
rbac.ResourceAssignRole, policy.ActionDelete)
}))
s.Run("Blank/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{})
// Blank is no perms in the role
check.Args(database.UpdateCustomRoleParams{
@@ -1375,7 +1593,7 @@ func (s *MethodTestSuite) TestUser() {
SitePermissions: nil,
OrgPermissions: nil,
UserPermissions: nil,
- }).Asserts(rbac.ResourceAssignRole, policy.ActionUpdate)
+ }).Asserts(rbac.ResourceAssignRole, policy.ActionUpdate).ErrorsWithPG(sql.ErrNoRows)
}))
s.Run("SitePermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{
@@ -1406,7 +1624,7 @@ func (s *MethodTestSuite) TestUser() {
rbac.ResourceTemplate, policy.ActionViewInsights,
rbac.ResourceWorkspace.WithOwner(testActorID.String()), policy.ActionRead,
- )
+ ).ErrorsWithPG(sql.ErrNoRows)
}))
s.Run("OrgPermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) {
orgID := uuid.New()
@@ -1494,22 +1712,29 @@ func (s *MethodTestSuite) TestUser() {
func (s *MethodTestSuite) TestWorkspace() {
s.Run("GetWorkspaceByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: o.ID,
+ TemplateID: tpl.ID,
+ })
check.Args(ws.ID).Asserts(ws, policy.ActionRead)
}))
- s.Run("GetWorkspaces", s.Subtest(func(db database.Store, check *expects) {
- _ = dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- _ = dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ s.Run("GetWorkspaces", s.Subtest(func(_ database.Store, check *expects) {
// No asserts here because SQLFilter.
check.Args(database.GetWorkspacesParams{}).Asserts()
}))
- s.Run("GetAuthorizedWorkspaces", s.Subtest(func(db database.Store, check *expects) {
- _ = dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- _ = dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ s.Run("GetAuthorizedWorkspaces", s.Subtest(func(_ database.Store, check *expects) {
// No asserts here because SQLFilter.
check.Args(database.GetWorkspacesParams{}, emptyPreparedAuthorized{}).Asserts()
}))
s.Run("GetWorkspacesAndAgentsByOwnerID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
_ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
@@ -1519,6 +1744,7 @@ func (s *MethodTestSuite) TestWorkspace() {
check.Args(ws.OwnerID).Asserts()
}))
s.Run("GetAuthorizedWorkspacesAndAgentsByOwnerID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
_ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
@@ -1528,37 +1754,116 @@ func (s *MethodTestSuite) TestWorkspace() {
check.Args(ws.OwnerID, emptyPreparedAuthorized{}).Asserts()
}))
s.Run("GetLatestWorkspaceBuildByWorkspaceID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID})
- check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns(b)
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ check.Args(w.ID).Asserts(w, policy.ActionRead).Returns(b)
}))
s.Run("GetWorkspaceAgentByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
- check.Args(agt.ID).Asserts(ws, policy.ActionRead).Returns(agt)
+ check.Args(agt.ID).Asserts(w, policy.ActionRead).Returns(agt)
}))
s.Run("GetWorkspaceAgentLifecycleStateByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
- check.Args(agt.ID).Asserts(ws, policy.ActionRead)
+ check.Args(agt.ID).Asserts(w, policy.ActionRead)
}))
s.Run("GetWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
_ = db.InsertWorkspaceAgentMetadata(context.Background(), database.InsertWorkspaceAgentMetadataParams{
WorkspaceAgentID: agt.ID,
@@ -1568,77 +1873,191 @@ func (s *MethodTestSuite) TestWorkspace() {
check.Args(database.GetWorkspaceAgentMetadataParams{
WorkspaceAgentID: agt.ID,
Keys: []string{"test"},
- }).Asserts(ws, policy.ActionRead)
+ }).Asserts(w, policy.ActionRead)
}))
s.Run("GetWorkspaceAgentByInstanceID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
- check.Args(agt.AuthInstanceID.String).Asserts(ws, policy.ActionRead).Returns(agt)
+ check.Args(agt.AuthInstanceID.String).Asserts(w, policy.ActionRead).Returns(agt)
}))
s.Run("UpdateWorkspaceAgentLifecycleStateByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(database.UpdateWorkspaceAgentLifecycleStateByIDParams{
ID: agt.ID,
LifecycleState: database.WorkspaceAgentLifecycleStateCreated,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(database.UpdateWorkspaceAgentMetadataParams{
WorkspaceAgentID: agt.ID,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceAgentLogOverflowByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
- agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
- check.Args(database.UpdateWorkspaceAgentLogOverflowByIDParams{
- ID: agt.ID,
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
+ agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
+ check.Args(database.UpdateWorkspaceAgentLogOverflowByIDParams{
+ ID: agt.ID,
LogsOverflowed: true,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceAgentStartupByID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(database.UpdateWorkspaceAgentStartupByIDParams{
ID: agt.ID,
Subsystems: []database.WorkspaceAgentSubsystem{
database.WorkspaceAgentSubsystemEnvbox,
},
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("GetWorkspaceAgentLogsAfter", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(database.GetWorkspaceAgentLogsAfterParams{
@@ -1646,11 +2065,30 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceAgentLog{})
}))
s.Run("GetWorkspaceAppByAgentIDAndSlug", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
app := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID})
@@ -1661,11 +2099,30 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(ws, policy.ActionRead).Returns(app)
}))
s.Run("GetWorkspaceAppsByAgentID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
a := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID})
@@ -1674,58 +2131,234 @@ func (s *MethodTestSuite) TestWorkspace() {
check.Args(agt.ID).Asserts(ws, policy.ActionRead).Returns(slice.New(a, b))
}))
s.Run("GetWorkspaceBuildByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(build.ID).Asserts(ws, policy.ActionRead).Returns(build)
}))
s.Run("GetWorkspaceBuildByJobID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(build.JobID).Asserts(ws, policy.ActionRead).Returns(build)
}))
s.Run("GetWorkspaceBuildByWorkspaceIDAndBuildNumber", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 10})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ BuildNumber: 10,
+ })
check.Args(database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams{
WorkspaceID: ws.ID,
BuildNumber: build.BuildNumber,
}).Asserts(ws, policy.ActionRead).Returns(build)
}))
s.Run("GetWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(build.ID).Asserts(ws, policy.ActionRead).
Returns([]database.WorkspaceBuildParameter{})
}))
s.Run("GetWorkspaceBuildsByWorkspaceID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 1})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 2})
- _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 3})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j1 := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j1.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ BuildNumber: 1,
+ })
+ j2 := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j2.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ BuildNumber: 2,
+ })
+ j3 := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j3.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ BuildNumber: 3,
+ })
check.Args(database.GetWorkspaceBuildsByWorkspaceIDParams{WorkspaceID: ws.ID}).Asserts(ws, policy.ActionRead) // ordering
}))
s.Run("GetWorkspaceByAgentID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(agt.ID).Asserts(ws, policy.ActionRead)
}))
s.Run("GetWorkspaceAgentsInLatestBuildByWorkspaceID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(ws.ID).Asserts(ws, policy.ActionRead)
}))
s.Run("GetWorkspaceByOwnerIDAndName", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.GetWorkspaceByOwnerIDAndNameParams{
OwnerID: ws.OwnerID,
Deleted: ws.Deleted,
@@ -1733,58 +2366,157 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(ws, policy.ActionRead)
}))
s.Run("GetWorkspaceResourceByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- _ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ })
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
check.Args(res.ID).Asserts(ws, policy.ActionRead).Returns(res)
}))
s.Run("Build/GetWorkspaceResourcesByJobID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
- check.Args(job.ID).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceResource{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: ws.ID,
+ TemplateVersionID: tv.ID,
+ })
+ check.Args(build.JobID).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceResource{})
}))
s.Run("Template/GetWorkspaceResourcesByJobID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
- v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, JobID: uuid.New()})
- job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: v.JobID, Type: database.ProvisionerJobTypeTemplateVersionImport})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ JobID: uuid.New(),
+ })
+ job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ ID: v.JobID,
+ Type: database.ProvisionerJobTypeTemplateVersionImport,
+ })
check.Args(job.ID).Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionRead}).Returns([]database.WorkspaceResource{})
}))
s.Run("InsertWorkspace", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
check.Args(database.InsertWorkspaceParams{
ID: uuid.New(),
OwnerID: u.ID,
OrganizationID: o.ID,
AutomaticUpdates: database.AutomaticUpdatesNever,
+ TemplateID: tpl.ID,
}).Asserts(rbac.ResourceWorkspace.WithOwner(u.ID.String()).InOrg(o.ID), policy.ActionCreate)
}))
s.Run("Start/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
- t := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ t := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: t.ID,
+ TemplateID: t.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ OrganizationID: o.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
check.Args(database.InsertWorkspaceBuildParams{
- WorkspaceID: w.ID,
- Transition: database.WorkspaceTransitionStart,
- Reason: database.BuildReasonInitiator,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ Transition: database.WorkspaceTransitionStart,
+ Reason: database.BuildReasonInitiator,
+ JobID: pj.ID,
}).Asserts(w, policy.ActionWorkspaceStart)
}))
s.Run("Stop/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
- t := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ t := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: t.ID,
+ TemplateID: t.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ OrganizationID: o.ID,
})
check.Args(database.InsertWorkspaceBuildParams{
- WorkspaceID: w.ID,
- Transition: database.WorkspaceTransitionStop,
- Reason: database.BuildReasonInitiator,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ Transition: database.WorkspaceTransitionStop,
+ Reason: database.BuildReasonInitiator,
+ JobID: pj.ID,
}).Asserts(w, policy.ActionWorkspaceStop)
}))
s.Run("Start/RequireActiveVersion/VersionMismatch/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
- t := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ t := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
ctx := testutil.Context(s.T(), testutil.WaitShort)
err := db.UpdateTemplateAccessControlByID(ctx, database.UpdateTemplateAccessControlByIDParams{
ID: t.ID,
@@ -1792,24 +2524,39 @@ func (s *MethodTestSuite) TestWorkspace() {
})
require.NoError(s.T(), err)
v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
- TemplateID: uuid.NullUUID{UUID: t.ID},
+ TemplateID: uuid.NullUUID{UUID: t.ID},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
})
w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: t.ID,
+ TemplateID: t.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ OrganizationID: o.ID,
})
check.Args(database.InsertWorkspaceBuildParams{
WorkspaceID: w.ID,
Transition: database.WorkspaceTransitionStart,
Reason: database.BuildReasonInitiator,
TemplateVersionID: v.ID,
+ JobID: pj.ID,
}).Asserts(
w, policy.ActionWorkspaceStart,
t, policy.ActionUpdate,
)
}))
s.Run("Start/RequireActiveVersion/VersionsMatch/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
- v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
t := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
ActiveVersionID: v.ID,
})
@@ -1821,7 +2568,12 @@ func (s *MethodTestSuite) TestWorkspace() {
require.NoError(s.T(), err)
w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: t.ID,
+ TemplateID: t.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ OrganizationID: o.ID,
})
// Assert that we do not check for template update permissions
// if versions match.
@@ -1830,21 +2582,64 @@ func (s *MethodTestSuite) TestWorkspace() {
Transition: database.WorkspaceTransitionStart,
Reason: database.BuildReasonInitiator,
TemplateVersionID: v.ID,
+ JobID: pj.ID,
}).Asserts(
w, policy.ActionWorkspaceStart,
)
}))
s.Run("Delete/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ OrganizationID: o.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
check.Args(database.InsertWorkspaceBuildParams{
- WorkspaceID: w.ID,
- Transition: database.WorkspaceTransitionDelete,
- Reason: database.BuildReasonInitiator,
+ WorkspaceID: w.ID,
+ Transition: database.WorkspaceTransitionDelete,
+ Reason: database.BuildReasonInitiator,
+ TemplateVersionID: tv.ID,
+ JobID: pj.ID,
}).Asserts(w, policy.ActionDelete)
}))
s.Run("InsertWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: w.ID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.InsertWorkspaceBuildParametersParams{
WorkspaceBuildID: b.ID,
Name: []string{"foo", "bar"},
@@ -1852,7 +2647,17 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(w, policy.ActionUpdate)
}))
s.Run("UpdateWorkspace", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
expected := w
expected.Name = ""
check.Args(database.UpdateWorkspaceParams{
@@ -1860,64 +2665,180 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(w, policy.ActionUpdate).Returns(expected)
}))
s.Run("UpdateWorkspaceDormantDeletingAt", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceDormantDeletingAtParams{
ID: w.ID,
}).Asserts(w, policy.ActionUpdate)
}))
s.Run("UpdateWorkspaceAutomaticUpdates", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceAutomaticUpdatesParams{
ID: w.ID,
AutomaticUpdates: database.AutomaticUpdatesAlways,
}).Asserts(w, policy.ActionUpdate)
}))
s.Run("UpdateWorkspaceAppHealthByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
app := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID})
check.Args(database.UpdateWorkspaceAppHealthByIDParams{
ID: app.ID,
Health: database.WorkspaceAppHealthDisabled,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceAutostart", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceAutostartParams{
- ID: ws.ID,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ ID: w.ID,
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceBuildDeadlineByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.UpdateWorkspaceBuildDeadlineByIDParams{
- ID: build.ID,
- UpdatedAt: build.UpdatedAt,
- Deadline: build.Deadline,
- }).Asserts(ws, policy.ActionUpdate)
+ ID: b.ID,
+ UpdatedAt: b.UpdatedAt,
+ Deadline: b.Deadline,
+ }).Asserts(w, policy.ActionUpdate)
}))
s.Run("SoftDeleteWorkspaceByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- ws.Deleted = true
- check.Args(ws.ID).Asserts(ws, policy.ActionDelete).Returns()
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ w.Deleted = true
+ check.Args(w.ID).Asserts(w, policy.ActionDelete).Returns()
}))
s.Run("UpdateWorkspaceDeletedByID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{Deleted: true})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ Deleted: true,
+ })
check.Args(database.UpdateWorkspaceDeletedByIDParams{
- ID: ws.ID,
+ ID: w.ID,
Deleted: true,
- }).Asserts(ws, policy.ActionDelete).Returns()
+ }).Asserts(w, policy.ActionDelete).Returns()
}))
s.Run("UpdateWorkspaceLastUsedAt", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceLastUsedAtParams{
- ID: ws.ID,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ ID: w.ID,
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceNextStartAt", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceNextStartAtParams{
ID: ws.ID,
NextStartAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
@@ -1930,50 +2851,144 @@ func (s *MethodTestSuite) TestWorkspace() {
}).Asserts(rbac.ResourceWorkspace.All(), policy.ActionUpdate)
}))
s.Run("BatchUpdateWorkspaceLastUsedAt", s.Subtest(func(db database.Store, check *expects) {
- ws1 := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- ws2 := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w1 := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ w2 := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.BatchUpdateWorkspaceLastUsedAtParams{
- IDs: []uuid.UUID{ws1.ID, ws2.ID},
+ IDs: []uuid.UUID{w1.ID, w2.ID},
}).Asserts(rbac.ResourceWorkspace.All(), policy.ActionUpdate).Returns()
}))
s.Run("UpdateWorkspaceTTL", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
check.Args(database.UpdateWorkspaceTTLParams{
- ID: ws.ID,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ ID: w.ID,
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("GetWorkspaceByWorkspaceAppID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
app := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID})
- check.Args(app.ID).Asserts(ws, policy.ActionRead)
+ check.Args(app.ID).Asserts(w, policy.ActionRead)
}))
s.Run("ActivityBumpWorkspace", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
- dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
check.Args(database.ActivityBumpWorkspaceParams{
- WorkspaceID: ws.ID,
- }).Asserts(ws, policy.ActionUpdate).Returns()
+ WorkspaceID: w.ID,
+ }).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("FavoriteWorkspace", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
- check.Args(ws.ID).Asserts(ws, policy.ActionUpdate).Returns()
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ check.Args(w.ID).Asserts(w, policy.ActionUpdate).Returns()
}))
s.Run("UnfavoriteWorkspace", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
- check.Args(ws.ID).Asserts(ws, policy.ActionUpdate).Returns()
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ check.Args(w.ID).Asserts(w, policy.ActionUpdate).Returns()
}))
}
func (s *MethodTestSuite) TestWorkspacePortSharing() {
s.Run("UpsertWorkspaceAgentPortShare", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
ps := dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
//nolint:gosimple // casting is not a simplification
check.Args(database.UpsertWorkspaceAgentPortShareParams{
@@ -1986,7 +3001,16 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() {
}))
s.Run("GetWorkspaceAgentPortShare", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
ps := dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
check.Args(database.GetWorkspaceAgentPortShareParams{
WorkspaceID: ps.WorkspaceID,
@@ -1996,13 +3020,31 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() {
}))
s.Run("ListWorkspaceAgentPortShares", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
ps := dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceAgentPortShare{ps})
}))
s.Run("DeleteWorkspaceAgentPortShare", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
ps := dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
check.Args(database.DeleteWorkspaceAgentPortShareParams{
WorkspaceID: ps.WorkspaceID,
@@ -2012,17 +3054,33 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() {
}))
s.Run("DeleteWorkspaceAgentPortSharesByTemplate", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- t := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: t.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
_ = dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
- check.Args(t.ID).Asserts(t, policy.ActionUpdate).Returns()
+ check.Args(tpl.ID).Asserts(tpl, policy.ActionUpdate).Returns()
}))
s.Run("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- t := dbgen.Template(s.T(), db, database.Template{})
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: t.ID})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
_ = dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID})
- check.Args(t.ID).Asserts(t, policy.ActionUpdate).Returns()
+ check.Args(tpl.ID).Asserts(tpl, policy.ActionUpdate).Returns()
}))
}
@@ -2031,7 +3089,7 @@ func (s *MethodTestSuite) TestProvisionerKeys() {
org := dbgen.Organization(s.T(), db, database.Organization{})
pk := database.ProvisionerKey{
ID: uuid.New(),
- CreatedAt: time.Now(),
+ CreatedAt: dbtestutil.NowInDefaultTimezone(),
OrganizationID: org.ID,
Name: strings.ToLower(coderdtest.RandomName(s.T())),
HashedSecret: []byte(coderdtest.RandomName(s.T())),
@@ -2072,6 +3130,7 @@ func (s *MethodTestSuite) TestProvisionerKeys() {
CreatedAt: pk.CreatedAt,
OrganizationID: pk.OrganizationID,
Name: pk.Name,
+ HashedSecret: pk.HashedSecret,
},
}
check.Args(org.ID).Asserts(pk, policy.ActionRead).Returns(pks)
@@ -2085,6 +3144,7 @@ func (s *MethodTestSuite) TestProvisionerKeys() {
CreatedAt: pk.CreatedAt,
OrganizationID: pk.OrganizationID,
Name: pk.Name,
+ HashedSecret: pk.HashedSecret,
},
}
check.Args(org.ID).Asserts(pk, policy.ActionRead).Returns(pks)
@@ -2098,7 +3158,9 @@ func (s *MethodTestSuite) TestProvisionerKeys() {
func (s *MethodTestSuite) TestExtraMethods() {
s.Run("GetProvisionerDaemons", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
}),
@@ -2107,9 +3169,11 @@ func (s *MethodTestSuite) TestExtraMethods() {
check.Args().Asserts(d, policy.ActionRead)
}))
s.Run("GetProvisionerDaemonsByOrganization", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
org := dbgen.Organization(s.T(), db, database.Organization{})
d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
OrganizationID: org.ID,
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
}),
@@ -2120,6 +3184,7 @@ func (s *MethodTestSuite) TestExtraMethods() {
check.Args(database.GetProvisionerDaemonsByOrganizationParams{OrganizationID: org.ID}).Asserts(d, policy.ActionRead).Returns(ds)
}))
s.Run("GetEligibleProvisionerDaemonsByProvisionerJobIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
org := dbgen.Organization(s.T(), db, database.Organization{})
tags := database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
@@ -2130,6 +3195,7 @@ func (s *MethodTestSuite) TestExtraMethods() {
Tags: tags,
Provisioner: database.ProvisionerTypeEcho,
StorageMethod: database.ProvisionerStorageMethodFile,
+ Input: json.RawMessage("{}"),
})
s.NoError(err, "insert provisioner job")
d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
@@ -2143,7 +3209,9 @@ func (s *MethodTestSuite) TestExtraMethods() {
check.Args(uuid.UUIDs{j.ID}).Asserts(d, policy.ActionRead).Returns(ds)
}))
s.Run("DeleteOldProvisionerDaemons", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
}),
@@ -2152,7 +3220,9 @@ func (s *MethodTestSuite) TestExtraMethods() {
check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete)
}))
s.Run("UpdateProvisionerDaemonLastSeenAt", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
}),
@@ -2165,145 +3235,151 @@ func (s *MethodTestSuite) TestExtraMethods() {
}))
}
-// All functions in this method test suite are not implemented in dbmem, but
-// we still want to assert RBAC checks.
func (s *MethodTestSuite) TestTailnetFunctions() {
- s.Run("CleanTailnetCoordinators", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("CleanTailnetCoordinators", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("CleanTailnetLostPeers", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("CleanTailnetLostPeers", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("CleanTailnetTunnels", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("CleanTailnetTunnels", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteAllTailnetClientSubscriptions", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteAllTailnetClientSubscriptions", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteAllTailnetClientSubscriptionsParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteAllTailnetTunnels", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteAllTailnetTunnels", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteAllTailnetTunnelsParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteCoordinator", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteCoordinator", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteTailnetAgent", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteTailnetAgent", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteTailnetAgentParams{}).
- Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).Errors(sql.ErrNoRows).
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteTailnetClient", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteTailnetClient", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteTailnetClientParams{}).
- Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).Errors(sql.ErrNoRows).
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteTailnetClientSubscription", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteTailnetClientSubscription", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteTailnetClientSubscriptionParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("DeleteTailnetPeer", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteTailnetPeer", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteTailnetPeerParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented).
+ ErrorsWithPG(sql.ErrNoRows)
}))
- s.Run("DeleteTailnetTunnel", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("DeleteTailnetTunnel", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.DeleteTailnetTunnelParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented).
+ ErrorsWithPG(sql.ErrNoRows)
}))
- s.Run("GetAllTailnetAgents", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetAllTailnetAgents", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetTailnetAgents", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetTailnetAgents", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetTailnetClientsForAgent", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetTailnetClientsForAgent", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetTailnetPeers", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetTailnetPeers", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetTailnetTunnelPeerBindings", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetTailnetTunnelPeerBindings", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetTailnetTunnelPeerIDs", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetTailnetTunnelPeerIDs", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetAllTailnetCoordinators", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetAllTailnetCoordinators", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetAllTailnetPeers", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetAllTailnetPeers", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("GetAllTailnetTunnels", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("GetAllTailnetTunnels", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("UpsertTailnetAgent", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.UpsertTailnetAgentParams{}).
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ check.Args(database.UpsertTailnetAgentParams{Node: json.RawMessage("{}")}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("UpsertTailnetClient", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.UpsertTailnetClientParams{}).
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ check.Args(database.UpsertTailnetClientParams{Node: json.RawMessage("{}")}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("UpsertTailnetClientSubscription", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.UpsertTailnetClientSubscriptionParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("UpsertTailnetCoordinator", s.Subtest(func(db database.Store, check *expects) {
+ s.Run("UpsertTailnetCoordinator", s.Subtest(func(_ database.Store, check *expects) {
check.Args(uuid.New()).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("UpsertTailnetPeer", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.UpsertTailnetPeerParams{
Status: database.TailnetStatusOk,
}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionCreate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("UpsertTailnetTunnel", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.UpsertTailnetTunnelParams{}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionCreate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
- s.Run("UpdateTailnetPeerStatusByCoordinator", s.Subtest(func(_ database.Store, check *expects) {
- check.Args(database.UpdateTailnetPeerStatusByCoordinatorParams{}).
+ s.Run("UpdateTailnetPeerStatusByCoordinator", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ check.Args(database.UpdateTailnetPeerStatusByCoordinatorParams{Status: database.TailnetStatusOk}).
Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
}
@@ -2395,6 +3471,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(l)
}))
s.Run("GetLatestWorkspaceBuildsByWorkspaceIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID})
check.Args([]uuid.UUID{ws.ID}).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(b))
@@ -2403,10 +3480,12 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(database.UpsertDefaultProxyParams{}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns()
}))
s.Run("GetUserLinkByLinkedID", s.Subtest(func(db database.Store, check *expects) {
- l := dbgen.UserLink(s.T(), db, database.UserLink{})
+ u := dbgen.User(s.T(), db, database.User{})
+ l := dbgen.UserLink(s.T(), db, database.UserLink{UserID: u.ID})
check.Args(l.LinkedID).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(l)
}))
s.Run("GetUserLinkByUserIDLoginType", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
l := dbgen.UserLink(s.T(), db, database.UserLink{})
check.Args(database.GetUserLinkByUserIDLoginTypeParams{
UserID: l.UserID,
@@ -2414,6 +3493,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(l)
}))
s.Run("GetLatestWorkspaceBuilds", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{})
dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{})
check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead)
@@ -2465,10 +3545,12 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(int64(0))
}))
s.Run("GetTemplates", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.Template(s.T(), db, database.Template{})
check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("UpdateWorkspaceBuildCostByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{})
o := b
o.DailyCost = 10
@@ -2478,6 +3560,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionUpdate)
}))
s.Run("UpdateWorkspaceBuildProvisionerStateByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
check.Args(database.UpdateWorkspaceBuildProvisionerStateByIDParams{
@@ -2494,22 +3577,27 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceBuildsCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{CreatedAt: time.Now().Add(-time.Hour)})
check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceAgentsCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{CreatedAt: time.Now().Add(-time.Hour)})
check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceAppsCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
- _ = dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{CreatedAt: time.Now().Add(-time.Hour)})
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ _ = dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{CreatedAt: time.Now().Add(-time.Hour), OpenIn: database.WorkspaceAppOpenInSlimWindow})
check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceResourcesCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{CreatedAt: time.Now().Add(-time.Hour)})
check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceResourceMetadataCreatedAfter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
_ = dbgen.WorkspaceResourceMetadatums(s.T(), db, database.WorkspaceResourceMetadatum{})
check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
@@ -2522,6 +3610,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(time.Now()).Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ )
}))
s.Run("GetTemplateVersionsByIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
t1 := dbgen.Template(s.T(), db, database.Template{})
t2 := dbgen.Template(s.T(), db, database.Template{})
tv1 := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
@@ -2538,32 +3627,37 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Returns(slice.New(tv1, tv2, tv3))
}))
s.Run("GetParameterSchemasByJobID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
tpl := dbgen.Template(s.T(), db, database.Template{})
tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
})
job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: tv.JobID})
check.Args(job.ID).
- Asserts(tpl, policy.ActionRead).Errors(sql.ErrNoRows)
+ Asserts(tpl, policy.ActionRead).
+ ErrorsWithInMemDB(sql.ErrNoRows).
+ Returns([]database.ParameterSchema{})
}))
s.Run("GetWorkspaceAppsByAgentIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
aWs := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
aBuild := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: aWs.ID, JobID: uuid.New()})
aRes := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: aBuild.JobID})
aAgt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: aRes.ID})
- a := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: aAgt.ID})
+ a := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: aAgt.ID, OpenIn: database.WorkspaceAppOpenInSlimWindow})
bWs := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
bBuild := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: bWs.ID, JobID: uuid.New()})
bRes := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: bBuild.JobID})
bAgt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: bRes.ID})
- b := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: bAgt.ID})
+ b := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: bAgt.ID, OpenIn: database.WorkspaceAppOpenInSlimWindow})
check.Args([]uuid.UUID{a.AgentID, b.AgentID}).
Asserts(rbac.ResourceSystem, policy.ActionRead).
Returns([]database.WorkspaceApp{a, b})
}))
s.Run("GetWorkspaceResourcesByJobIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
tpl := dbgen.Template(s.T(), db, database.Template{})
v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, JobID: uuid.New()})
tJob := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: v.JobID, Type: database.ProvisionerJobTypeTemplateVersionImport})
@@ -2576,6 +3670,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Returns([]database.WorkspaceResource{})
}))
s.Run("GetWorkspaceResourceMetadataByResourceIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
_ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild})
@@ -2585,6 +3680,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetWorkspaceAgentsByResourceIDs", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
@@ -2602,15 +3698,18 @@ func (s *MethodTestSuite) TestSystemFunctions() {
Returns(slice.New(a, b))
}))
s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceAgentParams{
ID: uuid.New(),
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceApp", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceAppParams{
ID: uuid.New(),
Health: database.WorkspaceAppHealthDisabled,
SharingLevel: database.AppSharingLevelOwner,
+ OpenIn: database.WorkspaceAppOpenInSlimWindow,
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceResourceMetadata", s.Subtest(func(db database.Store, check *expects) {
@@ -2619,6 +3718,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("UpdateWorkspaceAgentConnectionByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()})
res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID})
@@ -2631,9 +3731,14 @@ func (s *MethodTestSuite) TestSystemFunctions() {
// TODO: we need to create a ProvisionerJob resource
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
StartedAt: sql.NullTime{Valid: false},
+ UpdatedAt: time.Now(),
})
- check.Args(database.AcquireProvisionerJobParams{OrganizationID: j.OrganizationID, Types: []database.ProvisionerType{j.Provisioner}, ProvisionerTags: must(json.Marshal(j.Tags))}).
- Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
+ check.Args(database.AcquireProvisionerJobParams{
+ StartedAt: sql.NullTime{Valid: true, Time: time.Now()},
+ OrganizationID: j.OrganizationID,
+ Types: []database.ProvisionerType{j.Provisioner},
+ ProvisionerTags: must(json.Marshal(j.Tags)),
+ }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
}))
s.Run("UpdateProvisionerJobWithCompleteByID", s.Subtest(func(db database.Store, check *expects) {
// TODO: we need to create a ProvisionerJob resource
@@ -2651,12 +3756,14 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ )
}))
s.Run("InsertProvisionerJob", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
// TODO: we need to create a ProvisionerJob resource
check.Args(database.InsertProvisionerJobParams{
ID: uuid.New(),
Provisioner: database.ProvisionerTypeEcho,
StorageMethod: database.ProvisionerStorageMethodFile,
Type: database.ProvisionerJobTypeWorkspaceBuild,
+ Input: json.RawMessage("{}"),
}).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ )
}))
s.Run("InsertProvisionerJobLogs", s.Subtest(func(db database.Store, check *expects) {
@@ -2674,16 +3781,19 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ )
}))
s.Run("UpsertProvisionerDaemon", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
org := dbgen.Organization(s.T(), db, database.Organization{})
pd := rbac.ResourceProvisionerDaemon.InOrg(org.ID)
check.Args(database.UpsertProvisionerDaemonParams{
OrganizationID: org.ID,
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
}),
}).Asserts(pd, policy.ActionCreate)
check.Args(database.UpsertProvisionerDaemonParams{
OrganizationID: org.ID,
+ Provisioners: []database.ProvisionerType{},
Tags: database.StringMap(map[string]string{
provisionersdk.TagScope: provisionersdk.ScopeUser,
provisionersdk.TagOwner: "11111111-1111-1111-1111-111111111111",
@@ -2691,15 +3801,17 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(pd.WithOwner("11111111-1111-1111-1111-111111111111"), policy.ActionCreate)
}))
s.Run("InsertTemplateVersionParameter", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{})
check.Args(database.InsertTemplateVersionParameterParams{
TemplateVersionID: v.ID,
+ Options: json.RawMessage("{}"),
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceResource", s.Subtest(func(db database.Store, check *expects) {
- r := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{})
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceResourceParams{
- ID: r.ID,
+ ID: uuid.New(),
Transition: database.WorkspaceTransitionStart,
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
@@ -2713,6 +3825,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(database.InsertWorkspaceAppStatsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceAgentScriptTimings", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceAgentScriptTimingsParams{
ScriptID: uuid.New(),
Stage: database.WorkspaceAgentScriptTimingStageStart,
@@ -2723,6 +3836,7 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(database.InsertWorkspaceAgentScriptsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceAgentMetadataParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceAgentLogs", s.Subtest(func(db database.Store, check *expects) {
@@ -2735,13 +3849,16 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(database.GetTemplateDAUsParams{}).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetActiveWorkspaceBuildsByTemplateID", s.Subtest(func(db database.Store, check *expects) {
- check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead).Errors(sql.ErrNoRows)
+ check.Args(uuid.New()).
+ Asserts(rbac.ResourceSystem, policy.ActionRead).
+ ErrorsWithInMemDB(sql.ErrNoRows).
+ Returns([]database.WorkspaceBuild{})
}))
s.Run("GetDeploymentDAUs", s.Subtest(func(db database.Store, check *expects) {
check.Args(int32(0)).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetAppSecurityKey", s.Subtest(func(db database.Store, check *expects) {
- check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead)
+ check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).ErrorsWithPG(sql.ErrNoRows)
}))
s.Run("UpsertAppSecurityKey", s.Subtest(func(db database.Store, check *expects) {
check.Args("foo").Asserts(rbac.ResourceSystem, policy.ActionUpdate)
@@ -2835,13 +3952,17 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(time.Time{}).Asserts()
}))
s.Run("InsertTemplateVersionVariable", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertTemplateVersionVariableParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertTemplateVersionWorkspaceTag", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertTemplateVersionWorkspaceTagParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("UpdateInactiveUsersToDormant", s.Subtest(func(db database.Store, check *expects) {
- check.Args(database.UpdateInactiveUsersToDormantParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate).Errors(sql.ErrNoRows)
+ check.Args(database.UpdateInactiveUsersToDormantParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate).
+ ErrorsWithInMemDB(sql.ErrNoRows).
+ Returns([]database.UpdateInactiveUsersToDormantRow{})
}))
s.Run("GetWorkspaceUniqueOwnerCountByTemplateIDs", s.Subtest(func(db database.Store, check *expects) {
check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, policy.ActionRead)
@@ -2865,8 +3986,24 @@ func (s *MethodTestSuite) TestSystemFunctions() {
check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead)
}))
s.Run("GetJFrogXrayScanByWorkspaceAndAgentID", s.Subtest(func(db database.Store, check *expects) {
- ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
- agent := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{})
+ u := dbgen.User(s.T(), db, database.User{})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{
+ JobID: pj.ID,
+ })
+ agent := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{
+ ResourceID: res.ID,
+ })
err := db.UpsertJFrogXrayScanByWorkspaceAndAgentID(context.Background(), database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams{
AgentID: agent.ID,
@@ -2893,13 +4030,27 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(ws, policy.ActionRead).Returns(expect)
}))
s.Run("UpsertJFrogXrayScanByWorkspaceAndAgentID", s.Subtest(func(db database.Store, check *expects) {
- tpl := dbgen.Template(s.T(), db, database.Template{})
+ u := dbgen.User(s.T(), db, database.User{})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
- TemplateID: tpl.ID,
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
+ pj := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{})
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{
+ JobID: pj.ID,
+ })
+ agent := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{
+ ResourceID: res.ID,
})
check.Args(database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams{
WorkspaceID: ws.ID,
- AgentID: uuid.New(),
+ AgentID: agent.ID,
}).Asserts(tpl, policy.ActionCreate)
}))
s.Run("DeleteRuntimeConfig", s.Subtest(func(db database.Store, check *expects) {
@@ -2941,15 +4092,31 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("GetProvisionerJobTimingsByJobID", s.Subtest(func(db database.Store, check *expects) {
- w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
+ u := dbgen.User(s.T(), db, database.User{})
+ org := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: org.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ OrganizationID: org.ID,
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ OwnerID: u.ID,
+ OrganizationID: org.ID,
+ TemplateID: tpl.ID,
+ })
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
})
- b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID})
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID, TemplateVersionID: tv.ID})
t := dbgen.ProvisionerJobTimings(s.T(), db, b, 2)
check.Args(j.ID).Asserts(w, policy.ActionRead).Returns(t)
}))
s.Run("GetWorkspaceAgentScriptTimingsByBuildID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
workspace := dbgen.Workspace(s.T(), db, database.WorkspaceTable{})
job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
@@ -2982,6 +4149,9 @@ func (s *MethodTestSuite) TestSystemFunctions() {
}
check.Args(build.ID).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(rows)
}))
+ s.Run("DisableForeignKeysAndTriggers", s.Subtest(func(db database.Store, check *expects) {
+ check.Args().Asserts()
+ }))
s.Run("InsertWorkspaceModule", s.Subtest(func(db database.Store, check *expects) {
j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
Type: database.ProvisionerJobTypeWorkspaceBuild,
@@ -3013,7 +4183,9 @@ func (s *MethodTestSuite) TestNotifications() {
s.Run("DeleteOldNotificationMessages", s.Subtest(func(_ database.Store, check *expects) {
check.Args().Asserts(rbac.ResourceNotificationMessage, policy.ActionDelete)
}))
- s.Run("EnqueueNotificationMessage", s.Subtest(func(_ database.Store, check *expects) {
+ s.Run("EnqueueNotificationMessage", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
+ // TODO: update this test once we have a specific role for notifications
check.Args(database.EnqueueNotificationMessageParams{
Method: database.NotificationMethodWebhook,
Payload: []byte("{}"),
@@ -3021,7 +4193,9 @@ func (s *MethodTestSuite) TestNotifications() {
}))
s.Run("FetchNewMessageMetadata", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
- check.Args(database.FetchNewMessageMetadataParams{UserID: u.ID}).Asserts(rbac.ResourceNotificationMessage, policy.ActionRead)
+ check.Args(database.FetchNewMessageMetadataParams{UserID: u.ID}).
+ Asserts(rbac.ResourceNotificationMessage, policy.ActionRead).
+ ErrorsWithPG(sql.ErrNoRows)
}))
s.Run("GetNotificationMessagesByStatus", s.Subtest(func(_ database.Store, check *expects) {
check.Args(database.GetNotificationMessagesByStatusParams{
@@ -3032,15 +4206,16 @@ func (s *MethodTestSuite) TestNotifications() {
// Notification templates
s.Run("GetNotificationTemplateByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
user := dbgen.User(s.T(), db, database.User{})
check.Args(user.ID).Asserts(rbac.ResourceNotificationTemplate, policy.ActionRead).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithPG(sql.ErrNoRows).
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
s.Run("GetNotificationTemplatesByKind", s.Subtest(func(db database.Store, check *expects) {
check.Args(database.NotificationTemplateKindSystem).
Asserts().
- Errors(dbmem.ErrUnimplemented)
-
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
// TODO(dannyk): add support for other database.NotificationTemplateKind types once implemented.
}))
s.Run("UpdateNotificationTemplateMethodByID", s.Subtest(func(db database.Store, check *expects) {
@@ -3048,7 +4223,7 @@ func (s *MethodTestSuite) TestNotifications() {
Method: database.NullNotificationMethod{NotificationMethod: database.NotificationMethodWebhook, Valid: true},
ID: notifications.TemplateWorkspaceDormant,
}).Asserts(rbac.ResourceNotificationTemplate, policy.ActionUpdate).
- Errors(dbmem.ErrUnimplemented)
+ ErrorsWithInMemDB(dbmem.ErrUnimplemented)
}))
// Notification preferences
@@ -3080,12 +4255,23 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() {
check.Args(app.ID).Asserts(rbac.ResourceOauth2App, policy.ActionRead).Returns(app)
}))
s.Run("GetOAuth2ProviderAppsByUserID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
user := dbgen.User(s.T(), db, database.User{})
key, _ := dbgen.APIKey(s.T(), db, database.APIKey{
UserID: user.ID,
})
- app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
- _ = dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
+ createdAt := dbtestutil.NowInDefaultTimezone()
+ if !dbtestutil.WillUsePostgres() {
+ createdAt = time.Time{}
+ }
+ app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{
+ CreatedAt: createdAt,
+ UpdatedAt: createdAt,
+ })
+ _ = dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{
+ CreatedAt: createdAt,
+ UpdatedAt: createdAt,
+ })
secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{
AppID: app.ID,
})
@@ -3093,6 +4279,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() {
_ = dbgen.OAuth2ProviderAppToken(s.T(), db, database.OAuth2ProviderAppToken{
AppSecretID: secret.ID,
APIKeyID: key.ID,
+ HashPrefix: []byte(fmt.Sprintf("%d", i)),
})
}
check.Args(user.ID).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionRead).Returns([]database.GetOAuth2ProviderAppsByUserIDRow{
@@ -3102,6 +4289,8 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() {
CallbackURL: app.CallbackURL,
Icon: app.Icon,
Name: app.Name,
+ CreatedAt: createdAt,
+ UpdatedAt: createdAt,
},
TokenCount: 5,
},
@@ -3111,9 +4300,10 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() {
check.Args(database.InsertOAuth2ProviderAppParams{}).Asserts(rbac.ResourceOauth2App, policy.ActionCreate)
}))
s.Run("UpdateOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
app.Name = "my-new-name"
- app.UpdatedAt = time.Now()
+ app.UpdatedAt = dbtestutil.NowInDefaultTimezone()
check.Args(database.UpdateOAuth2ProviderAppByIDParams{
ID: app.ID,
Name: app.Name,
@@ -3129,19 +4319,23 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() {
func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() {
s.Run("GetOAuth2ProviderAppSecretsByAppID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
app1 := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
app2 := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
secrets := []database.OAuth2ProviderAppSecret{
dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{
- AppID: app1.ID,
- CreatedAt: time.Now().Add(-time.Hour), // For ordering.
+ AppID: app1.ID,
+ CreatedAt: time.Now().Add(-time.Hour), // For ordering.
+ SecretPrefix: []byte("1"),
}),
dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{
- AppID: app1.ID,
+ AppID: app1.ID,
+ SecretPrefix: []byte("2"),
}),
}
_ = dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{
- AppID: app2.ID,
+ AppID: app2.ID,
+ SecretPrefix: []byte("3"),
})
check.Args(app1.ID).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionRead).Returns(secrets)
}))
@@ -3166,11 +4360,12 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() {
}).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionCreate)
}))
s.Run("UpdateOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{
AppID: app.ID,
})
- secret.LastUsedAt = sql.NullTime{Time: time.Now(), Valid: true}
+ secret.LastUsedAt = sql.NullTime{Time: dbtestutil.NowInDefaultTimezone(), Valid: true}
check.Args(database.UpdateOAuth2ProviderAppSecretByIDParams{
ID: secret.ID,
LastUsedAt: secret.LastUsedAt,
@@ -3222,12 +4417,14 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() {
check.Args(code.ID).Asserts(code, policy.ActionDelete)
}))
s.Run("DeleteOAuth2ProviderAppCodesByAppAndUserID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
user := dbgen.User(s.T(), db, database.User{})
app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{})
for i := 0; i < 5; i++ {
_ = dbgen.OAuth2ProviderAppCode(s.T(), db, database.OAuth2ProviderAppCode{
- AppID: app.ID,
- UserID: user.ID,
+ AppID: app.ID,
+ UserID: user.ID,
+ SecretPrefix: []byte(fmt.Sprintf("%d", i)),
})
}
check.Args(database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams{
@@ -3268,6 +4465,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() {
check.Args(token.HashPrefix).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionRead)
}))
s.Run("DeleteOAuth2ProviderAppTokensByAppAndUserID", s.Subtest(func(db database.Store, check *expects) {
+ dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
user := dbgen.User(s.T(), db, database.User{})
key, _ := dbgen.APIKey(s.T(), db, database.APIKey{
UserID: user.ID,
@@ -3280,6 +4478,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() {
_ = dbgen.OAuth2ProviderAppToken(s.T(), db, database.OAuth2ProviderAppToken{
AppSecretID: secret.ID,
APIKeyID: key.ID,
+ HashPrefix: []byte(fmt.Sprintf("%d", i)),
})
}
check.Args(database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams{
diff --git a/coderd/database/dbauthz/groupsauth_test.go b/coderd/database/dbauthz/groupsauth_test.go
index a72c4db3af38a..04d816629ac65 100644
--- a/coderd/database/dbauthz/groupsauth_test.go
+++ b/coderd/database/dbauthz/groupsauth_test.go
@@ -13,7 +13,6 @@ import (
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/database/dbgen"
- "github.com/coder/coder/v2/coderd/database/dbmem"
"github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/rbac"
)
@@ -22,13 +21,9 @@ import (
func TestGroupsAuth(t *testing.T) {
t.Parallel()
- if dbtestutil.WillUsePostgres() {
- t.Skip("this test would take too long to run on postgres")
- }
-
authz := rbac.NewAuthorizer(prometheus.NewRegistry())
-
- db := dbauthz.New(dbmem.New(), authz, slogtest.Make(t, &slogtest.Options{
+ store, _ := dbtestutil.NewDB(t)
+ db := dbauthz.New(store, authz, slogtest.Make(t, &slogtest.Options{
IgnoreErrors: true,
}), coderdtest.AccessControlStorePointer())
diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go
index 52e8dd42fea9c..fc01e39330d7d 100644
--- a/coderd/database/dbauthz/setup_test.go
+++ b/coderd/database/dbauthz/setup_test.go
@@ -22,8 +22,8 @@ import (
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
- "github.com/coder/coder/v2/coderd/database/dbmem"
"github.com/coder/coder/v2/coderd/database/dbmock"
+ "github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/rbac/regosql"
"github.com/coder/coder/v2/coderd/util/slice"
@@ -114,7 +114,7 @@ func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expec
methodName := names[len(names)-1]
s.methodAccounting[methodName]++
- db := dbmem.New()
+ db, _ := dbtestutil.NewDB(t)
fakeAuthorizer := &coderdtest.FakeAuthorizer{}
rec := &coderdtest.RecordingAuthorizer{
Wrapped: fakeAuthorizer,
@@ -217,7 +217,11 @@ func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expec
}
}
- rec.AssertActor(s.T(), actor, pairs...)
+ if testCase.outOfOrder {
+ rec.AssertOutOfOrder(s.T(), actor, pairs...)
+ } else {
+ rec.AssertActor(s.T(), actor, pairs...)
+ }
s.NoError(rec.AllAsserted(), "all rbac calls must be asserted")
})
}
@@ -236,6 +240,8 @@ func (s *MethodTestSuite) NoActorErrorTest(callMethod func(ctx context.Context)
func (s *MethodTestSuite) NotAuthorizedErrorTest(ctx context.Context, az *coderdtest.FakeAuthorizer, testCase expects, callMethod func(ctx context.Context) ([]reflect.Value, error)) {
s.Run("NotAuthorized", func() {
az.AlwaysReturn(rbac.ForbiddenWithInternal(xerrors.New("Always fail authz"), rbac.Subject{}, "", rbac.Object{}, nil))
+ // Override the SQL filter to always fail.
+ az.OverrideSQLFilter("FALSE")
// If we have assertions, that means the method should FAIL
// if RBAC will disallow the request. The returned error should
@@ -328,6 +334,14 @@ type expects struct {
notAuthorizedExpect string
cancelledCtxExpect string
successAuthorizer func(ctx context.Context, subject rbac.Subject, action policy.Action, obj rbac.Object) error
+ outOfOrder bool
+}
+
+// OutOfOrder is optional. It controls whether the assertions should be
+// asserted in order.
+func (m *expects) OutOfOrder() *expects {
+ m.outOfOrder = true
+ return m
}
// Asserts is required. Asserts the RBAC authorize calls that should be made.
@@ -358,6 +372,24 @@ func (m *expects) Errors(err error) *expects {
return m
}
+// ErrorsWithPG is optional. If it is never called, it will not be asserted.
+// It will only be asserted if the test is running with a Postgres database.
+func (m *expects) ErrorsWithPG(err error) *expects {
+ if dbtestutil.WillUsePostgres() {
+ return m.Errors(err)
+ }
+ return m
+}
+
+// ErrorsWithInMemDB is optional. If it is never called, it will not be asserted.
+// It will only be asserted if the test is running with an in-memory database.
+func (m *expects) ErrorsWithInMemDB(err error) *expects {
+ if !dbtestutil.WillUsePostgres() {
+ return m.Errors(err)
+ }
+ return m
+}
+
func (m *expects) FailSystemObjectChecks() *expects {
return m.WithSuccessAuthorizer(func(ctx context.Context, subject rbac.Subject, action policy.Action, obj rbac.Object) error {
if obj.Type == rbac.ResourceSystem.Type {
diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go
index d0bf6a4ce0f50..dd6a3a2cc1490 100644
--- a/coderd/database/dbgen/dbgen.go
+++ b/coderd/database/dbgen/dbgen.go
@@ -659,6 +659,7 @@ func WorkspaceApp(t testing.TB, db database.Store, orig database.WorkspaceApp) d
Health: takeFirst(orig.Health, database.WorkspaceAppHealthHealthy),
DisplayOrder: takeFirst(orig.DisplayOrder, 1),
Hidden: orig.Hidden,
+ OpenIn: takeFirst(orig.OpenIn, database.WorkspaceAppOpenInSlimWindow),
})
require.NoError(t, err, "insert app")
return resource
diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go
index 7b19790a6d8ea..d3b7b3fb35f5f 100644
--- a/coderd/database/dbmem/dbmem.go
+++ b/coderd/database/dbmem/dbmem.go
@@ -2206,6 +2206,11 @@ func (q *FakeQuerier) DeleteWorkspaceAgentPortSharesByTemplate(_ context.Context
return nil
}
+func (*FakeQuerier) DisableForeignKeysAndTriggers(_ context.Context) error {
+ // This is a no-op in the in-memory database.
+ return nil
+}
+
func (q *FakeQuerier) EnqueueNotificationMessage(_ context.Context, arg database.EnqueueNotificationMessageParams) error {
err := validateDatabaseType(arg)
if err != nil {
@@ -8348,6 +8353,10 @@ func (q *FakeQuerier) InsertWorkspaceApp(_ context.Context, arg database.InsertW
arg.SharingLevel = database.AppSharingLevelOwner
}
+ if arg.OpenIn == "" {
+ arg.OpenIn = database.WorkspaceAppOpenInSlimWindow
+ }
+
// nolint:gosimple
workspaceApp := database.WorkspaceApp{
ID: arg.ID,
@@ -8367,6 +8376,7 @@ func (q *FakeQuerier) InsertWorkspaceApp(_ context.Context, arg database.InsertW
Health: arg.Health,
Hidden: arg.Hidden,
DisplayOrder: arg.DisplayOrder,
+ OpenIn: arg.OpenIn,
}
q.workspaceApps = append(q.workspaceApps, workspaceApp)
return workspaceApp, nil
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
index 645357d6f095e..5df5c547a20d6 100644
--- a/coderd/database/dbmetrics/querymetrics.go
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -413,6 +413,13 @@ func (m queryMetricsStore) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.
return r0
}
+func (m queryMetricsStore) DisableForeignKeysAndTriggers(ctx context.Context) error {
+ start := time.Now()
+ r0 := m.s.DisableForeignKeysAndTriggers(ctx)
+ m.queryLatencies.WithLabelValues("DisableForeignKeysAndTriggers").Observe(time.Since(start).Seconds())
+ return r0
+}
+
func (m queryMetricsStore) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error {
start := time.Now()
r0 := m.s.EnqueueNotificationMessage(ctx, arg)
diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go
index 73a0e6d60af55..6b552fe5060ff 100644
--- a/coderd/database/dbmock/dbmock.go
+++ b/coderd/database/dbmock/dbmock.go
@@ -728,6 +728,20 @@ func (mr *MockStoreMockRecorder) DeleteWorkspaceAgentPortSharesByTemplate(arg0,
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteWorkspaceAgentPortSharesByTemplate", reflect.TypeOf((*MockStore)(nil).DeleteWorkspaceAgentPortSharesByTemplate), arg0, arg1)
}
+// DisableForeignKeysAndTriggers mocks base method.
+func (m *MockStore) DisableForeignKeysAndTriggers(arg0 context.Context) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "DisableForeignKeysAndTriggers", arg0)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// DisableForeignKeysAndTriggers indicates an expected call of DisableForeignKeysAndTriggers.
+func (mr *MockStoreMockRecorder) DisableForeignKeysAndTriggers(arg0 any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DisableForeignKeysAndTriggers", reflect.TypeOf((*MockStore)(nil).DisableForeignKeysAndTriggers), arg0)
+}
+
// EnqueueNotificationMessage mocks base method.
func (m *MockStore) EnqueueNotificationMessage(arg0 context.Context, arg1 database.EnqueueNotificationMessageParams) error {
m.ctrl.T.Helper()
diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go
index 671c65c68790e..4677602328c89 100644
--- a/coderd/database/dbpurge/dbpurge_test.go
+++ b/coderd/database/dbpurge/dbpurge_test.go
@@ -34,7 +34,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
// Ensures no goroutines leak.
diff --git a/coderd/database/dbrollup/dbrollup_test.go b/coderd/database/dbrollup/dbrollup_test.go
index eae7759d2059c..c5c2d8f9243b0 100644
--- a/coderd/database/dbrollup/dbrollup_test.go
+++ b/coderd/database/dbrollup/dbrollup_test.go
@@ -23,7 +23,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestRollup_Close(t *testing.T) {
diff --git a/coderd/database/dbtestutil/db.go b/coderd/database/dbtestutil/db.go
index b752d7c4c3a97..c76be1ed52a9d 100644
--- a/coderd/database/dbtestutil/db.go
+++ b/coderd/database/dbtestutil/db.go
@@ -87,6 +87,18 @@ func NewDBWithSQLDB(t testing.TB, opts ...Option) (database.Store, pubsub.Pubsub
return db, ps, sqlDB
}
+var DefaultTimezone = "Canada/Newfoundland"
+
+// NowInDefaultTimezone returns the current time rounded to the nearest microsecond in the default timezone
+// used by postgres in tests. Useful for object equality checks.
+func NowInDefaultTimezone() time.Time {
+ loc, err := time.LoadLocation(DefaultTimezone)
+ if err != nil {
+ panic(err)
+ }
+ return time.Now().In(loc).Round(time.Microsecond)
+}
+
func NewDB(t testing.TB, opts ...Option) (database.Store, pubsub.Pubsub) {
t.Helper()
@@ -115,7 +127,7 @@ func NewDB(t testing.TB, opts ...Option) (database.Store, pubsub.Pubsub) {
// - It has a non-UTC offset
// - It has a fractional hour UTC offset
// - It includes a daylight savings time component
- o.fixedTimezone = "Canada/Newfoundland"
+ o.fixedTimezone = DefaultTimezone
}
dbName := dbNameFromConnectionURL(t, connectionURL)
setDBTimezone(t, connectionURL, dbName, o.fixedTimezone)
@@ -318,3 +330,15 @@ func normalizeDump(schema []byte) []byte {
return schema
}
+
+// Deprecated: disable foreign keys was created to aid in migrating off
+// of the test-only in-memory database. Do not use this in new code.
+func DisableForeignKeysAndTriggers(t *testing.T, db database.Store) {
+ err := db.DisableForeignKeysAndTriggers(context.Background())
+ if t != nil {
+ require.NoError(t, err)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
diff --git a/coderd/database/dbtestutil/postgres_test.go b/coderd/database/dbtestutil/postgres_test.go
index 9cae9411289ad..d4aaacdf909d8 100644
--- a/coderd/database/dbtestutil/postgres_test.go
+++ b/coderd/database/dbtestutil/postgres_test.go
@@ -12,10 +12,11 @@ import (
"github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/database/migrations"
+ "github.com/coder/coder/v2/testutil"
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestOpen(t *testing.T) {
diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql
index f91a5371f06f6..50519485dc505 100644
--- a/coderd/database/dump.sql
+++ b/coderd/database/dump.sql
@@ -261,6 +261,12 @@ CREATE TYPE workspace_app_health AS ENUM (
'unhealthy'
);
+CREATE TYPE workspace_app_open_in AS ENUM (
+ 'tab',
+ 'window',
+ 'slim-window'
+);
+
CREATE TYPE workspace_transition AS ENUM (
'start',
'stop',
@@ -1602,7 +1608,8 @@ CREATE TABLE workspace_apps (
slug text NOT NULL,
external boolean DEFAULT false NOT NULL,
display_order integer DEFAULT 0 NOT NULL,
- hidden boolean DEFAULT false NOT NULL
+ hidden boolean DEFAULT false NOT NULL,
+ open_in workspace_app_open_in DEFAULT 'slim-window'::workspace_app_open_in NOT NULL
);
COMMENT ON COLUMN workspace_apps.display_order IS 'Specifies the order in which to display agent app in user interfaces.';
diff --git a/coderd/database/migrations/000282_workspace_app_add_open_in.down.sql b/coderd/database/migrations/000282_workspace_app_add_open_in.down.sql
new file mode 100644
index 0000000000000..9f866022f555e
--- /dev/null
+++ b/coderd/database/migrations/000282_workspace_app_add_open_in.down.sql
@@ -0,0 +1,3 @@
+ALTER TABLE workspace_apps DROP COLUMN open_in;
+
+DROP TYPE workspace_app_open_in;
diff --git a/coderd/database/migrations/000282_workspace_app_add_open_in.up.sql b/coderd/database/migrations/000282_workspace_app_add_open_in.up.sql
new file mode 100644
index 0000000000000..ccde2b09d6557
--- /dev/null
+++ b/coderd/database/migrations/000282_workspace_app_add_open_in.up.sql
@@ -0,0 +1,3 @@
+CREATE TYPE workspace_app_open_in AS ENUM ('tab', 'window', 'slim-window');
+
+ALTER TABLE workspace_apps ADD COLUMN open_in workspace_app_open_in NOT NULL DEFAULT 'slim-window'::workspace_app_open_in;
diff --git a/coderd/database/migrations/migrate_test.go b/coderd/database/migrations/migrate_test.go
index c64c2436da18d..7d016f7978fb1 100644
--- a/coderd/database/migrations/migrate_test.go
+++ b/coderd/database/migrations/migrate_test.go
@@ -28,7 +28,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestMigrate(t *testing.T) {
diff --git a/coderd/database/models.go b/coderd/database/models.go
index e9a5f93051ba5..9ca80d119a502 100644
--- a/coderd/database/models.go
+++ b/coderd/database/models.go
@@ -2151,6 +2151,67 @@ func AllWorkspaceAppHealthValues() []WorkspaceAppHealth {
}
}
+type WorkspaceAppOpenIn string
+
+const (
+ WorkspaceAppOpenInTab WorkspaceAppOpenIn = "tab"
+ WorkspaceAppOpenInWindow WorkspaceAppOpenIn = "window"
+ WorkspaceAppOpenInSlimWindow WorkspaceAppOpenIn = "slim-window"
+)
+
+func (e *WorkspaceAppOpenIn) Scan(src interface{}) error {
+ switch s := src.(type) {
+ case []byte:
+ *e = WorkspaceAppOpenIn(s)
+ case string:
+ *e = WorkspaceAppOpenIn(s)
+ default:
+ return fmt.Errorf("unsupported scan type for WorkspaceAppOpenIn: %T", src)
+ }
+ return nil
+}
+
+type NullWorkspaceAppOpenIn struct {
+ WorkspaceAppOpenIn WorkspaceAppOpenIn `json:"workspace_app_open_in"`
+ Valid bool `json:"valid"` // Valid is true if WorkspaceAppOpenIn is not NULL
+}
+
+// Scan implements the Scanner interface.
+func (ns *NullWorkspaceAppOpenIn) Scan(value interface{}) error {
+ if value == nil {
+ ns.WorkspaceAppOpenIn, ns.Valid = "", false
+ return nil
+ }
+ ns.Valid = true
+ return ns.WorkspaceAppOpenIn.Scan(value)
+}
+
+// Value implements the driver Valuer interface.
+func (ns NullWorkspaceAppOpenIn) Value() (driver.Value, error) {
+ if !ns.Valid {
+ return nil, nil
+ }
+ return string(ns.WorkspaceAppOpenIn), nil
+}
+
+func (e WorkspaceAppOpenIn) Valid() bool {
+ switch e {
+ case WorkspaceAppOpenInTab,
+ WorkspaceAppOpenInWindow,
+ WorkspaceAppOpenInSlimWindow:
+ return true
+ }
+ return false
+}
+
+func AllWorkspaceAppOpenInValues() []WorkspaceAppOpenIn {
+ return []WorkspaceAppOpenIn{
+ WorkspaceAppOpenInTab,
+ WorkspaceAppOpenInWindow,
+ WorkspaceAppOpenInSlimWindow,
+ }
+}
+
type WorkspaceTransition string
const (
@@ -3092,7 +3153,8 @@ type WorkspaceApp struct {
// Specifies the order in which to display agent app in user interfaces.
DisplayOrder int32 `db:"display_order" json:"display_order"`
// Determines if the app is not shown in user interfaces.
- Hidden bool `db:"hidden" json:"hidden"`
+ Hidden bool `db:"hidden" json:"hidden"`
+ OpenIn WorkspaceAppOpenIn `db:"open_in" json:"open_in"`
}
// A record of workspace app usage statistics
diff --git a/coderd/database/querier.go b/coderd/database/querier.go
index 2128315ce6dad..620cc14b3fd26 100644
--- a/coderd/database/querier.go
+++ b/coderd/database/querier.go
@@ -106,6 +106,10 @@ type sqlcQuerier interface {
DeleteTailnetTunnel(ctx context.Context, arg DeleteTailnetTunnelParams) (DeleteTailnetTunnelRow, error)
DeleteWorkspaceAgentPortShare(ctx context.Context, arg DeleteWorkspaceAgentPortShareParams) error
DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, templateID uuid.UUID) error
+ // Disable foreign keys and triggers for all tables.
+ // Deprecated: disable foreign keys was created to aid in migrating off
+ // of the test-only in-memory database. Do not use this in new code.
+ DisableForeignKeysAndTriggers(ctx context.Context) error
EnqueueNotificationMessage(ctx context.Context, arg EnqueueNotificationMessageParams) error
FavoriteWorkspace(ctx context.Context, id uuid.UUID) error
// This is used to build up the notification_message's JSON payload.
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go
index 1a7911bc64b4d..8fbb7c0b5be6c 100644
--- a/coderd/database/queries.sql.go
+++ b/coderd/database/queries.sql.go
@@ -9796,6 +9796,33 @@ func (q *sqlQuerier) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg
return i, err
}
+const disableForeignKeysAndTriggers = `-- name: DisableForeignKeysAndTriggers :exec
+DO $$
+DECLARE
+ table_record record;
+BEGIN
+ FOR table_record IN
+ SELECT table_schema, table_name
+ FROM information_schema.tables
+ WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+ AND table_type = 'BASE TABLE'
+ LOOP
+ EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL',
+ table_record.table_schema,
+ table_record.table_name);
+ END LOOP;
+END;
+$$
+`
+
+// Disable foreign keys and triggers for all tables.
+// Deprecated: disable foreign keys was created to aid in migrating off
+// of the test-only in-memory database. Do not use this in new code.
+func (q *sqlQuerier) DisableForeignKeysAndTriggers(ctx context.Context) error {
+ _, err := q.db.ExecContext(ctx, disableForeignKeysAndTriggers)
+ return err
+}
+
const getUserLinkByLinkedID = `-- name: GetUserLinkByLinkedID :one
SELECT
user_links.user_id, user_links.login_type, user_links.linked_id, user_links.oauth_access_token, user_links.oauth_refresh_token, user_links.oauth_expiry, user_links.oauth_access_token_key_id, user_links.oauth_refresh_token_key_id, user_links.claims
@@ -13157,7 +13184,7 @@ func (q *sqlQuerier) InsertWorkspaceAgentStats(ctx context.Context, arg InsertWo
}
const getWorkspaceAppByAgentIDAndSlug = `-- name: GetWorkspaceAppByAgentIDAndSlug :one
-SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden FROM workspace_apps WHERE agent_id = $1 AND slug = $2
+SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden, open_in FROM workspace_apps WHERE agent_id = $1 AND slug = $2
`
type GetWorkspaceAppByAgentIDAndSlugParams struct {
@@ -13186,12 +13213,13 @@ func (q *sqlQuerier) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg Ge
&i.External,
&i.DisplayOrder,
&i.Hidden,
+ &i.OpenIn,
)
return i, err
}
const getWorkspaceAppsByAgentID = `-- name: GetWorkspaceAppsByAgentID :many
-SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden FROM workspace_apps WHERE agent_id = $1 ORDER BY slug ASC
+SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden, open_in FROM workspace_apps WHERE agent_id = $1 ORDER BY slug ASC
`
func (q *sqlQuerier) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]WorkspaceApp, error) {
@@ -13221,6 +13249,7 @@ func (q *sqlQuerier) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid
&i.External,
&i.DisplayOrder,
&i.Hidden,
+ &i.OpenIn,
); err != nil {
return nil, err
}
@@ -13236,7 +13265,7 @@ func (q *sqlQuerier) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid
}
const getWorkspaceAppsByAgentIDs = `-- name: GetWorkspaceAppsByAgentIDs :many
-SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden FROM workspace_apps WHERE agent_id = ANY($1 :: uuid [ ]) ORDER BY slug ASC
+SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden, open_in FROM workspace_apps WHERE agent_id = ANY($1 :: uuid [ ]) ORDER BY slug ASC
`
func (q *sqlQuerier) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceApp, error) {
@@ -13266,6 +13295,7 @@ func (q *sqlQuerier) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.
&i.External,
&i.DisplayOrder,
&i.Hidden,
+ &i.OpenIn,
); err != nil {
return nil, err
}
@@ -13281,7 +13311,7 @@ func (q *sqlQuerier) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.
}
const getWorkspaceAppsCreatedAfter = `-- name: GetWorkspaceAppsCreatedAfter :many
-SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden FROM workspace_apps WHERE created_at > $1 ORDER BY slug ASC
+SELECT id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden, open_in FROM workspace_apps WHERE created_at > $1 ORDER BY slug ASC
`
func (q *sqlQuerier) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceApp, error) {
@@ -13311,6 +13341,7 @@ func (q *sqlQuerier) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt
&i.External,
&i.DisplayOrder,
&i.Hidden,
+ &i.OpenIn,
); err != nil {
return nil, err
}
@@ -13344,10 +13375,11 @@ INSERT INTO
healthcheck_threshold,
health,
display_order,
- hidden
+ hidden,
+ open_in
)
VALUES
- ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) RETURNING id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) RETURNING id, created_at, agent_id, display_name, icon, command, url, healthcheck_url, healthcheck_interval, healthcheck_threshold, health, subdomain, sharing_level, slug, external, display_order, hidden, open_in
`
type InsertWorkspaceAppParams struct {
@@ -13368,6 +13400,7 @@ type InsertWorkspaceAppParams struct {
Health WorkspaceAppHealth `db:"health" json:"health"`
DisplayOrder int32 `db:"display_order" json:"display_order"`
Hidden bool `db:"hidden" json:"hidden"`
+ OpenIn WorkspaceAppOpenIn `db:"open_in" json:"open_in"`
}
func (q *sqlQuerier) InsertWorkspaceApp(ctx context.Context, arg InsertWorkspaceAppParams) (WorkspaceApp, error) {
@@ -13389,6 +13422,7 @@ func (q *sqlQuerier) InsertWorkspaceApp(ctx context.Context, arg InsertWorkspace
arg.Health,
arg.DisplayOrder,
arg.Hidden,
+ arg.OpenIn,
)
var i WorkspaceApp
err := row.Scan(
@@ -13409,6 +13443,7 @@ func (q *sqlQuerier) InsertWorkspaceApp(ctx context.Context, arg InsertWorkspace
&i.External,
&i.DisplayOrder,
&i.Hidden,
+ &i.OpenIn,
)
return i, err
}
diff --git a/coderd/database/queries/testadmin.sql b/coderd/database/queries/testadmin.sql
new file mode 100644
index 0000000000000..77d39ce52768c
--- /dev/null
+++ b/coderd/database/queries/testadmin.sql
@@ -0,0 +1,20 @@
+-- name: DisableForeignKeysAndTriggers :exec
+-- Disable foreign keys and triggers for all tables.
+-- Deprecated: disable foreign keys was created to aid in migrating off
+-- of the test-only in-memory database. Do not use this in new code.
+DO $$
+DECLARE
+ table_record record;
+BEGIN
+ FOR table_record IN
+ SELECT table_schema, table_name
+ FROM information_schema.tables
+ WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+ AND table_type = 'BASE TABLE'
+ LOOP
+ EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL',
+ table_record.table_schema,
+ table_record.table_name);
+ END LOOP;
+END;
+$$;
diff --git a/coderd/database/queries/workspaceapps.sql b/coderd/database/queries/workspaceapps.sql
index 9ae1367093efd..2f431268a4c41 100644
--- a/coderd/database/queries/workspaceapps.sql
+++ b/coderd/database/queries/workspaceapps.sql
@@ -29,10 +29,11 @@ INSERT INTO
healthcheck_threshold,
health,
display_order,
- hidden
+ hidden,
+ open_in
)
VALUES
- ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) RETURNING *;
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) RETURNING *;
-- name: UpdateWorkspaceAppHealthByID :exec
UPDATE
diff --git a/coderd/jwtutils/jws.go b/coderd/jwtutils/jws.go
index 0c8ca9aa30f39..eca8752e1a88d 100644
--- a/coderd/jwtutils/jws.go
+++ b/coderd/jwtutils/jws.go
@@ -38,7 +38,7 @@ type Claims interface {
}
const (
- signingAlgo = jose.HS512
+ SigningAlgo = jose.HS512
)
type SigningKeyManager interface {
@@ -62,7 +62,7 @@ func Sign(ctx context.Context, s SigningKeyProvider, claims Claims) (string, err
}
signer, err := jose.NewSigner(jose.SigningKey{
- Algorithm: signingAlgo,
+ Algorithm: SigningAlgo,
Key: key,
}, &jose.SignerOptions{
ExtraHeaders: map[jose.HeaderKey]interface{}{
@@ -109,7 +109,7 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim
RegisteredClaims: jwt.Expected{
Time: time.Now(),
},
- SignatureAlgorithm: signingAlgo,
+ SignatureAlgorithm: SigningAlgo,
}
for _, opt := range opts {
@@ -127,8 +127,8 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim
signature := object.Signatures[0]
- if signature.Header.Algorithm != string(signingAlgo) {
- return xerrors.Errorf("expected JWS algorithm to be %q, got %q", signingAlgo, object.Signatures[0].Header.Algorithm)
+ if signature.Header.Algorithm != string(SigningAlgo) {
+ return xerrors.Errorf("expected JWS algorithm to be %q, got %q", SigningAlgo, object.Signatures[0].Header.Algorithm)
}
kid := signature.Header.KeyID
diff --git a/coderd/notifications/dispatch/smtp_test.go b/coderd/notifications/dispatch/smtp_test.go
index b448dd2582e67..c424d81d79683 100644
--- a/coderd/notifications/dispatch/smtp_test.go
+++ b/coderd/notifications/dispatch/smtp_test.go
@@ -26,7 +26,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestSMTP(t *testing.T) {
diff --git a/coderd/notifications/notifications_test.go b/coderd/notifications/notifications_test.go
index 1c4be51974b05..e404f4afb3c19 100644
--- a/coderd/notifications/notifications_test.go
+++ b/coderd/notifications/notifications_test.go
@@ -56,7 +56,7 @@ import (
var updateGoldenFiles = flag.Bool("update", false, "Update golden files")
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
// TestBasicNotificationRoundtrip enqueues a message to the store, waits for it to be acquired by a notifier,
diff --git a/coderd/provisionerdserver/acquirer_test.go b/coderd/provisionerdserver/acquirer_test.go
index 269b035d50edd..bc8fc3d6f5869 100644
--- a/coderd/provisionerdserver/acquirer_test.go
+++ b/coderd/provisionerdserver/acquirer_test.go
@@ -28,7 +28,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
// TestAcquirer_Store tests that a database.Store is accepted as a provisionerdserver.AcquirerStore
diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go
index 8899aa999f503..e3bbb621e57f6 100644
--- a/coderd/provisionerdserver/provisionerdserver.go
+++ b/coderd/provisionerdserver/provisionerdserver.go
@@ -1509,6 +1509,7 @@ func (s *server) CompleteJob(ctx context.Context, completed *proto.CompletedJob)
dur := time.Duration(protoAgent.GetConnectionTimeoutSeconds()) * time.Second
agentTimeouts[dur] = true
}
+
err = InsertWorkspaceResource(ctx, db, job.ID, workspaceBuild.Transition, protoResource, telemetrySnapshot)
if err != nil {
return xerrors.Errorf("insert provisioner job: %w", err)
@@ -2012,6 +2013,14 @@ func InsertWorkspaceResource(ctx context.Context, db database.Store, jobID uuid.
sharingLevel = database.AppSharingLevelPublic
}
+ openIn := database.WorkspaceAppOpenInSlimWindow
+ switch app.OpenIn {
+ case sdkproto.AppOpenIn_TAB:
+ openIn = database.WorkspaceAppOpenInTab
+ case sdkproto.AppOpenIn_WINDOW:
+ openIn = database.WorkspaceAppOpenInWindow
+ }
+
dbApp, err := db.InsertWorkspaceApp(ctx, database.InsertWorkspaceAppParams{
ID: uuid.New(),
CreatedAt: dbtime.Now(),
@@ -2036,6 +2045,7 @@ func InsertWorkspaceResource(ctx context.Context, db database.Store, jobID uuid.
Health: health,
DisplayOrder: int32(app.Order),
Hidden: app.Hidden,
+ OpenIn: openIn,
})
if err != nil {
return xerrors.Errorf("insert app: %w", err)
diff --git a/coderd/rbac/README.md b/coderd/rbac/README.md
index f6d432d124344..07bfaf061ca94 100644
--- a/coderd/rbac/README.md
+++ b/coderd/rbac/README.md
@@ -36,7 +36,7 @@ Both **negative** and **positive** permissions override **abstain** at the same
This can be represented by the following truth table, where Y represents _positive_, N represents _negative_, and \_ represents _abstain_:
| Action | Positive | Negative | Result |
-| ------ | -------- | -------- | ------ |
+|--------|----------|----------|--------|
| read | Y | \_ | Y |
| read | Y | N | N |
| read | \_ | \_ | \_ |
@@ -63,10 +63,10 @@ This can be represented by the following truth table, where Y represents _positi
A _role_ is a set of permissions. When evaluating a role's permission to form an action, all the relevant permissions for the role are combined at each level. Permissions at a higher level override permissions at a lower level.
The following table shows the per-level role evaluation.
-Y indicates that the role provides positive permissions, N indicates the role provides negative permissions, and _ indicates the role does not provide positive or negative permissions. YN_ indicates that the value in the cell does not matter for the access result.
+Y indicates that the role provides positive permissions, N indicates the role provides negative permissions, and _ indicates the role does not provide positive or negative permissions. YN_ indicates that the value in the cell does not matter for the access result.
| Role (example) | Site | Org | User | Result |
-| --------------- | ---- | ---- | ---- | ------ |
+|-----------------|------|------|------|--------|
| site-admin | Y | YN\_ | YN\_ | Y |
| no-permission | N | YN\_ | YN\_ | N |
| org-admin | \_ | Y | YN\_ | Y |
@@ -102,7 +102,7 @@ Example of a scope for a workspace agent token, using an `allow_list` containing
}
```
-# Testing
+## Testing
You can test outside of golang by using the `opa` cli.
diff --git a/coderd/rbac/USAGE.md b/coderd/rbac/USAGE.md
index 76bff69a88c5a..b2a20bf5cbb4d 100644
--- a/coderd/rbac/USAGE.md
+++ b/coderd/rbac/USAGE.md
@@ -1,6 +1,6 @@
# Using RBAC
-# Overview
+## Overview
> _NOTE: you should probably read [`README.md`](README.md) beforehand, but it's
> not essential._
@@ -19,7 +19,7 @@ We have a number of roles (some of which have legacy connotations back to v1).
These can be found in `coderd/rbac/roles.go`.
| Role | Description | Example resources (non-exhaustive) |
-| -------------------- | ------------------------------------------------------------------- | -------------------------------------------- |
+|----------------------|---------------------------------------------------------------------|----------------------------------------------|
| **owner** | Super-user, first user in Coder installation, has all\* permissions | all\* |
| **member** | A regular user | workspaces, own details, provisioner daemons |
| **auditor** | Viewer of audit log events, read-only access to a few resources | audit logs, templates, users, groups |
@@ -43,7 +43,7 @@ Roles are collections of permissions (we call them _actions_).
These can be found in `coderd/rbac/policy/policy.go`.
| Action | Description |
-| ----------------------- | --------------------------------------- |
+|-------------------------|-----------------------------------------|
| **create** | Create a resource |
| **read** | Read a resource |
| **update** | Update a resource |
@@ -58,7 +58,7 @@ These can be found in `coderd/rbac/policy/policy.go`.
| **stop** | Stop a workspace |
| **assign** | Assign user to role / org |
-# Creating a new noun
+## Creating a new noun
In the following example, we're going to create a new RBAC noun for a new entity
called a "frobulator" _(just some nonsense word for demonstration purposes)_.
@@ -291,7 +291,7 @@ frobulator, but no test case covered it.
**NOTE: don't just add cases which make the tests pass; consider all the ways in
which your resource must be used, and test all of those scenarios!**
-# Database authorization
+## Database authorization
Now that we have the RBAC system fully configured, we need to make use of it.
@@ -350,7 +350,7 @@ before we validate (this explains the `fetchWithPostFilter` naming).
All queries are executed through `dbauthz`, and now our little frobulators are
protected!
-# API authorization
+## API authorization
API authorization is not strictly required because we have database
authorization in place, but it's a good practice to reject requests as soon as
diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go
index ff4f9ce2371d4..aaba7d6eae3af 100644
--- a/coderd/rbac/authz.go
+++ b/coderd/rbac/authz.go
@@ -12,7 +12,7 @@ import (
"github.com/ammario/tlru"
"github.com/open-policy-agent/opa/ast"
- "github.com/open-policy-agent/opa/rego"
+ "github.com/open-policy-agent/opa/v1/rego"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"go.opentelemetry.io/otel/attribute"
diff --git a/coderd/rbac/error.go b/coderd/rbac/error.go
index 98735ade322c4..1ea16dca7f13f 100644
--- a/coderd/rbac/error.go
+++ b/coderd/rbac/error.go
@@ -6,8 +6,8 @@ import (
"flag"
"fmt"
- "github.com/open-policy-agent/opa/rego"
"github.com/open-policy-agent/opa/topdown"
+ "github.com/open-policy-agent/opa/v1/rego"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/coderd/httpapi/httpapiconstraints"
diff --git a/coderd/rbac/policy.rego b/coderd/rbac/policy.rego
index bf7a38c3cc194..ea381fa88d8e4 100644
--- a/coderd/rbac/policy.rego
+++ b/coderd/rbac/policy.rego
@@ -1,5 +1,7 @@
package authz
-import future.keywords
+
+import rego.v1
+
# A great playground: https://play.openpolicyagent.org/
# Helpful cli commands to debug.
# opa eval --format=pretty 'data.authz.allow' -d policy.rego -i input.json
@@ -29,67 +31,74 @@ import future.keywords
# bool_flip lets you assign a value to an inverted bool.
# You cannot do 'x := !false', but you can do 'x := bool_flip(false)'
-bool_flip(b) = flipped {
- b
- flipped = false
+bool_flip(b) := flipped if {
+ b
+ flipped = false
}
-bool_flip(b) = flipped {
- not b
- flipped = true
+bool_flip(b) := flipped if {
+ not b
+ flipped = true
}
# number is a quick way to get a set of {true, false} and convert it to
# -1: {false, true} or {false}
# 0: {}
# 1: {true}
-number(set) = c {
+number(set) := c if {
count(set) == 0
- c := 0
+ c := 0
}
-number(set) = c {
+number(set) := c if {
false in set
- c := -1
+ c := -1
}
-number(set) = c {
+number(set) := c if {
not false in set
set[_]
- c := 1
+ c := 1
}
# site, org, and user rules are all similar. Each rule should return a number
# from [-1, 1]. The number corresponds to "negative", "abstain", and "positive"
# for the given level. See the 'allow' rules for how these numbers are used.
-default site = 0
+default site := 0
+
site := site_allow(input.subject.roles)
+
default scope_site := 0
+
scope_site := site_allow([input.subject.scope])
-site_allow(roles) := num {
+site_allow(roles) := num if {
# allow is a set of boolean values without duplicates.
- allow := { x |
+ allow := {x |
# Iterate over all site permissions in all roles
- perm := roles[_].site[_]
- perm.action in [input.action, "*"]
+ perm := roles[_].site[_]
+ perm.action in [input.action, "*"]
perm.resource_type in [input.object.type, "*"]
+
# x is either 'true' or 'false' if a matching permission exists.
- x := bool_flip(perm.negate)
- }
- num := number(allow)
+ x := bool_flip(perm.negate)
+ }
+ num := number(allow)
}
# org_members is the list of organizations the actor is apart of.
-org_members := { orgID |
+org_members := {orgID |
input.subject.roles[_].org[orgID]
}
# org is the same as 'site' except we need to iterate over each organization
# that the actor is a member of.
-default org = 0
+default org := 0
+
org := org_allow(input.subject.roles)
+
default scope_org := 0
+
scope_org := org_allow([input.scope])
# org_allow_set is a helper function that iterates over all orgs that the actor
@@ -102,10 +111,10 @@ scope_org := org_allow([input.scope])
# The reason we calculate this for all orgs, and not just the input.object.org_owner
# is that sometimes the input.object.org_owner is unknown. In those cases
# we have a list of org_ids that can we use in a SQL 'WHERE' clause.
-org_allow_set(roles) := allow_set {
- allow_set := { id: num |
+org_allow_set(roles) := allow_set if {
+ allow_set := {id: num |
id := org_members[_]
- set := { x |
+ set := {x |
perm := roles[_].org[id][_]
perm.action in [input.action, "*"]
perm.resource_type in [input.object.type, "*"]
@@ -115,7 +124,7 @@ org_allow_set(roles) := allow_set {
}
}
-org_allow(roles) := num {
+org_allow(roles) := num if {
# If the object has "any_org" set to true, then use the other
# org_allow block.
not input.object.any_org
@@ -135,78 +144,82 @@ org_allow(roles) := num {
# This is useful for UI elements when we want to conclude, "Can the user create
# a new template in any organization?"
# It is easier than iterating over every organization the user is apart of.
-org_allow(roles) := num {
+org_allow(roles) := num if {
input.object.any_org # if this is false, this code block is not used
allow := org_allow_set(roles)
-
	# allow is a map of {"<org_id>": <number>}. We only care about values
# that are 1, and ignore the rest.
num := number([
- keep |
- # for every value in the mapping
- value := allow[_]
- # only keep values > 0.
- # 1 = allow, 0 = abstain, -1 = deny
- # We only need 1 explicit allow to allow the action.
- # deny's and abstains are intentionally ignored.
- value > 0
- # result set is a set of [true,false,...]
- # which "number()" will convert to a number.
- keep := true
+ keep |
+ # for every value in the mapping
+ value := allow[_]
+
+ # only keep values > 0.
+ # 1 = allow, 0 = abstain, -1 = deny
+ # We only need 1 explicit allow to allow the action.
+ # deny's and abstains are intentionally ignored.
+ value > 0
+
+ # result set is a set of [true,false,...]
+ # which "number()" will convert to a number.
+ keep := true
])
}
# 'org_mem' is set to true if the user is an org member
# If 'any_org' is set to true, use the other block to determine org membership.
-org_mem := true {
+org_mem if {
not input.object.any_org
input.object.org_owner != ""
input.object.org_owner in org_members
}
-org_mem := true {
+org_mem if {
input.object.any_org
count(org_members) > 0
}
-org_ok {
+org_ok if {
org_mem
}
# If the object has no organization, then the user is also considered part of
# the non-existent org.
-org_ok {
+org_ok if {
input.object.org_owner == ""
not input.object.any_org
}
# User is the same as the site, except it only applies if the user owns the object and
# the user is apart of the org (if the object has an org).
-default user = 0
+default user := 0
+
user := user_allow(input.subject.roles)
+
default user_scope := 0
+
scope_user := user_allow([input.scope])
-user_allow(roles) := num {
- input.object.owner != ""
- input.subject.id = input.object.owner
- allow := { x |
- perm := roles[_].user[_]
- perm.action in [input.action, "*"]
+user_allow(roles) := num if {
+ input.object.owner != ""
+ input.subject.id = input.object.owner
+ allow := {x |
+ perm := roles[_].user[_]
+ perm.action in [input.action, "*"]
perm.resource_type in [input.object.type, "*"]
- x := bool_flip(perm.negate)
- }
- num := number(allow)
+ x := bool_flip(perm.negate)
+ }
+ num := number(allow)
}
# Scope allow_list is a list of resource IDs explicitly allowed by the scope.
# If the list is '*', then all resources are allowed.
-scope_allow_list {
+scope_allow_list if {
"*" in input.subject.scope.allow_list
}
-scope_allow_list {
+scope_allow_list if {
# If the wildcard is listed in the allow_list, we do not care about the
# object.id. This line is included to prevent partial compilations from
# ever needing to include the object.id.
@@ -226,39 +239,41 @@ scope_allow_list {
# Allow query:
# data.authz.role_allow = true data.authz.scope_allow = true
-role_allow {
+role_allow if {
site = 1
}
-role_allow {
+role_allow if {
not site = -1
org = 1
}
-role_allow {
+role_allow if {
not site = -1
not org = -1
+
# If we are not a member of an org, and the object has an org, then we are
# not authorized. This is an "implied -1" for not being in the org.
org_ok
user = 1
}
-scope_allow {
+scope_allow if {
scope_allow_list
scope_site = 1
}
-scope_allow {
+scope_allow if {
scope_allow_list
not scope_site = -1
scope_org = 1
}
-scope_allow {
+scope_allow if {
scope_allow_list
not scope_site = -1
not scope_org = -1
+
# If we are not a member of an org, and the object has an org, then we are
# not authorized. This is an "implied -1" for not being in the org.
org_ok
@@ -266,26 +281,28 @@ scope_allow {
}
# ACL for users
-acl_allow {
+acl_allow if {
# Should you have to be a member of the org too?
perms := input.object.acl_user_list[input.subject.id]
+
# Either the input action or wildcard
[input.action, "*"][_] in perms
}
# ACL for groups
-acl_allow {
+acl_allow if {
# If there is no organization owner, the object cannot be owned by an
# org_scoped team.
org_mem
group := input.subject.groups[_]
perms := input.object.acl_group_list[group]
+
# Either the input action or wildcard
[input.action, "*"][_] in perms
}
# ACL for 'all_users' special group
-acl_allow {
+acl_allow if {
org_mem
perms := input.object.acl_group_list[input.object.org_owner]
[input.action, "*"][_] in perms
@@ -296,13 +313,13 @@ acl_allow {
# The role or the ACL must allow the action. Scopes can be used to limit,
# so scope_allow must always be true.
-allow {
+allow if {
role_allow
scope_allow
}
# ACL list must also have the scope_allow to pass
-allow {
+allow if {
acl_allow
scope_allow
}
diff --git a/coderd/rbac/regosql/compile.go b/coderd/rbac/regosql/compile.go
index 69ef2a018f36c..7c843d619aa26 100644
--- a/coderd/rbac/regosql/compile.go
+++ b/coderd/rbac/regosql/compile.go
@@ -5,7 +5,7 @@ import (
"strings"
"github.com/open-policy-agent/opa/ast"
- "github.com/open-policy-agent/opa/rego"
+ "github.com/open-policy-agent/opa/v1/rego"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/coderd/rbac/regosql/sqltypes"
diff --git a/coderd/rbac/regosql/compile_test.go b/coderd/rbac/regosql/compile_test.go
index be0385bf83699..a6b59d1fdd4bd 100644
--- a/coderd/rbac/regosql/compile_test.go
+++ b/coderd/rbac/regosql/compile_test.go
@@ -4,7 +4,7 @@ import (
"testing"
"github.com/open-policy-agent/opa/ast"
- "github.com/open-policy-agent/opa/rego"
+ "github.com/open-policy-agent/opa/v1/rego"
"github.com/stretchr/testify/require"
"github.com/coder/coder/v2/coderd/rbac/regosql"
diff --git a/coderd/telemetry/telemetry_test.go b/coderd/telemetry/telemetry_test.go
index 2b70cd2a6d2c3..e0cbfd1cfa193 100644
--- a/coderd/telemetry/telemetry_test.go
+++ b/coderd/telemetry/telemetry_test.go
@@ -27,7 +27,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestTelemetry(t *testing.T) {
@@ -59,6 +59,7 @@ func TestTelemetry(t *testing.T) {
_ = dbgen.WorkspaceApp(t, db, database.WorkspaceApp{
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
+ OpenIn: database.WorkspaceAppOpenInSlimWindow,
})
group := dbgen.Group(t, db, database.Group{})
_ = dbgen.GroupMember(t, db, database.GroupMemberTable{UserID: user.ID, GroupID: group.ID})
diff --git a/coderd/templateversions_test.go b/coderd/templateversions_test.go
index 1a67508880188..b2ec822f998bc 100644
--- a/coderd/templateversions_test.go
+++ b/coderd/templateversions_test.go
@@ -293,6 +293,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
type = string
default = "2"
}
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {}`,
},
wantTags: map[string]string{"owner": "", "scope": "organization"},
@@ -301,18 +306,23 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
name: "main.tf with empty workspace tags",
files: map[string]string{
`main.tf`: `
- variable "a" {
- type = string
- default = "1"
- }
- data "coder_parameter" "b" {
- type = string
- default = "2"
- }
- resource "null_resource" "test" {}
- data "coder_workspace_tags" "tags" {
- tags = {}
- }`,
+ variable "a" {
+ type = string
+ default = "1"
+ }
+ data "coder_parameter" "b" {
+ type = string
+ default = "2"
+ }
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
+ resource "null_resource" "test" {}
+ data "coder_workspace_tags" "tags" {
+ tags = {}
+ }`,
},
wantTags: map[string]string{"owner": "", "scope": "organization"},
},
@@ -328,6 +338,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
type = string
default = "2"
}
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {}
data "coder_workspace_tags" "tags" {
tags = {
@@ -343,22 +358,28 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
name: "main.tf with workspace tags and request tags",
files: map[string]string{
`main.tf`: `
- variable "a" {
- type = string
- default = "1"
- }
- data "coder_parameter" "b" {
- type = string
- default = "2"
- }
- resource "null_resource" "test" {}
- data "coder_workspace_tags" "tags" {
- tags = {
- "foo": "bar",
- "a": var.a,
- "b": data.coder_parameter.b.value,
+ // This file is the same as the above, except for this comment.
+ variable "a" {
+ type = string
+ default = "1"
+ }
+ data "coder_parameter" "b" {
+ type = string
+ default = "2"
}
- }`,
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
+ resource "null_resource" "test" {}
+ data "coder_workspace_tags" "tags" {
+ tags = {
+ "foo": "bar",
+ "a": var.a,
+ "b": data.coder_parameter.b.value,
+ }
+ }`,
},
reqTags: map[string]string{"baz": "zap", "foo": "noclobber"},
wantTags: map[string]string{"owner": "", "scope": "organization", "foo": "bar", "baz": "zap", "a": "1", "b": "2"},
@@ -375,6 +396,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
type = string
default = "2"
}
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {
name = "foo"
}
@@ -401,6 +427,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
type = string
default = "2"
}
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {
name = "foo"
}
@@ -423,6 +454,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
name: "main.tf with workspace tags that attempts to set user scope",
files: map[string]string{
`main.tf`: `
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {}
data "coder_workspace_tags" "tags" {
tags = {
@@ -437,6 +473,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
name: "main.tf with workspace tags that attempt to clobber org ID",
files: map[string]string{
`main.tf`: `
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {}
data "coder_workspace_tags" "tags" {
tags = {
@@ -451,6 +492,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
name: "main.tf with workspace tags that set scope=user",
files: map[string]string{
`main.tf`: `
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }
resource "null_resource" "test" {}
data "coder_workspace_tags" "tags" {
tags = {
@@ -460,6 +506,19 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
},
wantTags: map[string]string{"owner": templateAdminUser.ID.String(), "scope": "user"},
},
+ // Ref: https://github.com/coder/coder/issues/16021
+ {
+ name: "main.tf with no workspace_tags and a function call in a parameter default",
+ files: map[string]string{
+ `main.tf`: `
+ data "coder_parameter" "unrelated" {
+ name = "unrelated"
+ type = "list(string)"
+ default = jsonencode(["a", "b"])
+ }`,
+ },
+ wantTags: map[string]string{"owner": "", "scope": "organization"},
+ },
} {
tt := tt
t.Run(tt.name, func(t *testing.T) {
diff --git a/coderd/unhanger/detector_test.go b/coderd/unhanger/detector_test.go
index 4300d7d1b8661..43eb62bfa884b 100644
--- a/coderd/unhanger/detector_test.go
+++ b/coderd/unhanger/detector_test.go
@@ -28,7 +28,7 @@ import (
)
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
func TestDetectorNoJobs(t *testing.T) {
diff --git a/coderd/updatecheck/updatecheck_test.go b/coderd/updatecheck/updatecheck_test.go
index afc0f57cbdd41..3e21309c5ff71 100644
--- a/coderd/updatecheck/updatecheck_test.go
+++ b/coderd/updatecheck/updatecheck_test.go
@@ -154,5 +154,5 @@ func TestChecker_Latest(t *testing.T) {
}
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go
index c6e251806230d..91d8d7b3fbd6a 100644
--- a/coderd/workspaceapps/apptest/apptest.go
+++ b/coderd/workspaceapps/apptest/apptest.go
@@ -20,7 +20,7 @@ import (
"testing"
"time"
- "github.com/go-jose/go-jose/v3"
+ "github.com/go-jose/go-jose/v4"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -28,6 +28,7 @@ import (
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
+ "github.com/coder/coder/v2/coderd/jwtutils"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/workspaceapps"
"github.com/coder/coder/v2/codersdk"
@@ -430,7 +431,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) {
require.NotNil(t, appTokenCookie, "no signed app token cookie in response")
require.Equal(t, appTokenCookie.Path, u.Path, "incorrect path on app token cookie")
- object, err := jose.ParseSigned(appTokenCookie.Value)
+ object, err := jose.ParseSigned(appTokenCookie.Value, []jose.SignatureAlgorithm{jwtutils.SigningAlgo})
require.NoError(t, err)
require.Len(t, object.Signatures, 1)
@@ -712,7 +713,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) {
// Parse the JWT without verifying it (since we can't access the key
// from this test).
- object, err := jose.ParseSigned(appTokenCookie.Value)
+ object, err := jose.ParseSigned(appTokenCookie.Value, []jose.SignatureAlgorithm{jwtutils.SigningAlgo})
require.NoError(t, err)
require.Len(t, object.Signatures, 1)
@@ -1192,7 +1193,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) {
require.NotNil(t, appTokenCookie, "no signed token cookie in response")
require.Equal(t, appTokenCookie.Path, "/", "incorrect path on signed token cookie")
- object, err := jose.ParseSigned(appTokenCookie.Value)
+ object, err := jose.ParseSigned(appTokenCookie.Value, []jose.SignatureAlgorithm{jwtutils.SigningAlgo})
require.NoError(t, err)
require.Len(t, object.Signatures, 1)
diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go
index 0a19f7dfdaf0a..7ee6398d1d857 100644
--- a/coderd/workspacebuilds.go
+++ b/coderd/workspacebuilds.go
@@ -655,8 +655,8 @@ func (api *API) workspaceBuildParameters(rw http.ResponseWriter, r *http.Request
// @Produce json
// @Tags Builds
// @Param workspacebuild path string true "Workspace build ID"
-// @Param before query int false "Before Unix timestamp"
-// @Param after query int false "After Unix timestamp"
+// @Param before query int false "Before log id"
+// @Param after query int false "After log id"
// @Param follow query bool false "Follow log stream"
// @Success 200 {array} codersdk.ProvisionerJobLog
// @Router /workspacebuilds/{workspacebuild}/logs [get]
diff --git a/coderd/workspacestats/tracker_test.go b/coderd/workspacestats/tracker_test.go
index e43e297fd2ddd..2803e5a5322b3 100644
--- a/coderd/workspacestats/tracker_test.go
+++ b/coderd/workspacestats/tracker_test.go
@@ -219,5 +219,5 @@ func TestTracker_MultipleInstances(t *testing.T) {
}
func TestMain(m *testing.M) {
- goleak.VerifyTestMain(m)
+ goleak.VerifyTestMain(m, testutil.GoleakOptions...)
}
diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go
index 2123322356a3c..3d757f4c5590b 100644
--- a/coderd/wsbuilder/wsbuilder.go
+++ b/coderd/wsbuilder/wsbuilder.go
@@ -381,6 +381,10 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object
code := http.StatusInternalServerError
if rbac.IsUnauthorizedError(err) {
code = http.StatusForbidden
+ } else if database.IsUniqueViolation(err) {
+ // Concurrent builds may result in duplicate
+ // workspace_builds_workspace_id_build_number_key.
+ code = http.StatusConflict
}
return BuildError{code, "insert workspace build", err}
}
diff --git a/codersdk/deployment.go b/codersdk/deployment.go
index bfd359402525f..134ce573ca164 100644
--- a/codersdk/deployment.go
+++ b/codersdk/deployment.go
@@ -793,7 +793,7 @@ func DefaultSupportLinks(docsURL string) []LinkConfig {
},
{
Name: "Report a bug",
- Target: "https://github.com/coder/coder/issues/new?labels=needs+grooming&body=" + buildInfo,
+ Target: "https://github.com/coder/coder/issues/new?labels=needs+triage&body=" + buildInfo,
Icon: "bug",
},
{
@@ -2376,7 +2376,7 @@ when required by your organization's security policy.`,
Flag: "agent-fallback-troubleshooting-url",
Env: "CODER_AGENT_FALLBACK_TROUBLESHOOTING_URL",
Hidden: true,
- Default: "https://coder.com/docs/templates/troubleshooting",
+ Default: "https://coder.com/docs/admin/templates/troubleshooting",
Value: &c.AgentFallbackTroubleshootingURL,
YAML: "agentFallbackTroubleshootingURL",
},
diff --git a/codersdk/idpsync.go b/codersdk/idpsync.go
index 3a2e707ccb623..2cc1f51ee3011 100644
--- a/codersdk/idpsync.go
+++ b/codersdk/idpsync.go
@@ -5,6 +5,7 @@ import (
"encoding/json"
"fmt"
"net/http"
+ "net/url"
"regexp"
"github.com/google/uuid"
@@ -163,3 +164,35 @@ func (c *Client) GetOrganizationAvailableIDPSyncFields(ctx context.Context, orgI
var resp []string
return resp, json.NewDecoder(res.Body).Decode(&resp)
}
+
+func (c *Client) GetIDPSyncFieldValues(ctx context.Context, claimField string) ([]string, error) {
+ qv := url.Values{}
+ qv.Add("claimField", claimField)
+ res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/settings/idpsync/field-values?%s", qv.Encode()), nil)
+ if err != nil {
+ return nil, xerrors.Errorf("make request: %w", err)
+ }
+ defer res.Body.Close()
+
+ if res.StatusCode != http.StatusOK {
+ return nil, ReadBodyAsError(res)
+ }
+ var resp []string
+ return resp, json.NewDecoder(res.Body).Decode(&resp)
+}
+
+func (c *Client) GetOrganizationIDPSyncFieldValues(ctx context.Context, orgID string, claimField string) ([]string, error) {
+ qv := url.Values{}
+ qv.Add("claimField", claimField)
+ res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/organizations/%s/settings/idpsync/field-values?%s", orgID, qv.Encode()), nil)
+ if err != nil {
+ return nil, xerrors.Errorf("make request: %w", err)
+ }
+ defer res.Body.Close()
+
+ if res.StatusCode != http.StatusOK {
+ return nil, ReadBodyAsError(res)
+ }
+ var resp []string
+ return resp, json.NewDecoder(res.Body).Decode(&resp)
+}
diff --git a/codersdk/workspaceapps.go b/codersdk/workspaceapps.go
index e2ef9f2695419..3b4528087800c 100644
--- a/codersdk/workspaceapps.go
+++ b/codersdk/workspaceapps.go
@@ -34,6 +34,20 @@ var MapWorkspaceAppSharingLevels = map[WorkspaceAppSharingLevel]struct{}{
WorkspaceAppSharingLevelPublic: {},
}
+type WorkspaceAppOpenIn string
+
+const (
+ WorkspaceAppOpenInSlimWindow WorkspaceAppOpenIn = "slim-window"
+ WorkspaceAppOpenInWindow WorkspaceAppOpenIn = "window"
+ WorkspaceAppOpenInTab WorkspaceAppOpenIn = "tab"
+)
+
+var MapWorkspaceAppOpenIns = map[WorkspaceAppOpenIn]struct{}{
+ WorkspaceAppOpenInSlimWindow: {},
+ WorkspaceAppOpenInWindow: {},
+ WorkspaceAppOpenInTab: {},
+}
+
type WorkspaceApp struct {
ID uuid.UUID `json:"id" format:"uuid"`
// URL is the address being proxied to inside the workspace.
@@ -62,6 +76,7 @@ type WorkspaceApp struct {
Healthcheck Healthcheck `json:"healthcheck"`
Health WorkspaceAppHealth `json:"health"`
Hidden bool `json:"hidden"`
+ OpenIn WorkspaceAppOpenIn `json:"open_in"`
}
type Healthcheck struct {
diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md
index 4ec589f3356bb..22f169c556b9f 100644
--- a/docs/CONTRIBUTING.md
+++ b/docs/CONTRIBUTING.md
@@ -332,7 +332,7 @@ Breaking changes can be triggered in two ways:
### Security
> If you find a vulnerability, **DO NOT FILE AN ISSUE**. Instead, send an email
-> to security@coder.com.
+> to <security@coder.com>.
The
[`security`](https://github.com/coder/coder/issues?q=sort%3Aupdated-desc+label%3Asecurity)
diff --git a/docs/admin/external-auth.md b/docs/admin/external-auth.md
index d859467aa6d7a..ee6510d751a44 100644
--- a/docs/admin/external-auth.md
+++ b/docs/admin/external-auth.md
@@ -1,88 +1,98 @@
# External Authentication
+Coder supports external authentication via OAuth2.0. This allows enabling any OAuth provider as well as integrations with Git providers,
+such as GitHub, GitLab, and Bitbucket.
+
+External authentication can also be used to integrate with external services
+like JFrog Artifactory and others.
+
To add an external authentication provider, you'll need to create an OAuth
-application. The following providers are supported:
+application. The following providers have been tested and work with Coder:
-- [GitHub](#github)
-- [GitLab](https://docs.gitlab.com/ee/integration/oauth_provider.html)
-- [BitBucket](https://support.atlassian.com/bitbucket-cloud/docs/use-oauth-on-bitbucket-cloud/)
- [Azure DevOps](https://learn.microsoft.com/en-us/azure/devops/integrate/get-started/authentication/oauth?view=azure-devops)
- [Azure DevOps (via Entra ID)](https://learn.microsoft.com/en-us/entra/architecture/auth-oauth2)
+- [BitBucket](https://support.atlassian.com/bitbucket-cloud/docs/use-oauth-on-bitbucket-cloud/)
+- [GitHub](#github)
+- [GitLab](https://docs.gitlab.com/ee/integration/oauth_provider.html)
+
+If you have experience with a provider that is not listed here, please
+[file an issue](https://github.com/coder/internal/issues/new?title=request%28docs%29%3A+external-auth+-+request+title+here%0D%0A&labels=["customer-feedback","docs"]&body=doc%3A+%5Bexternal-auth%5D%28https%3A%2F%2Fcoder.com%2Fdocs%2Fadmin%2Fexternal-auth%29%0D%0A%0D%0Aplease+enter+your+request+here%0D%0A)
-The next step is to configure the Coder server to use the OAuth application by
-setting the following environment variables:
+## Configuration
+
+After you create an OAuth application, set environment variables to configure the Coder server to use it:
```env
CODER_EXTERNAL_AUTH_0_ID="<USER_DEFINED_ID>"
CODER_EXTERNAL_AUTH_0_TYPE=<git-provider-name>
-CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
-CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
+CODER_EXTERNAL_AUTH_0_CLIENT_ID=<your id>
+CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=<your secret>
-# Optionally, configure a custom display name and icon
+# Optionally, configure a custom display name and icon:
CODER_EXTERNAL_AUTH_0_DISPLAY_NAME="Google Calendar"
CODER_EXTERNAL_AUTH_0_DISPLAY_ICON="https://mycustomicon.com/google.svg"
```
The `CODER_EXTERNAL_AUTH_0_ID` environment variable is used for internal
-reference. Therefore, it can be set arbitrarily (e.g., `primary-github` for your
-GitHub provider).
+reference. Set it with a value that helps you identify it. For example, you can use `CODER_EXTERNAL_AUTH_0_ID="primary-github"` for your
+GitHub provider.
-## GitHub
+Add the following code to any template to add a button to the workspace setup page which will allow you to authenticate with your provider:
-> If you don't require fine-grained access control, it's easier to configure a
-> GitHub OAuth app!
+```tf
+data "coder_external_auth" "<provider_id>" {
+  id = "<provider_id>"
+}
-1. [Create a GitHub App](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app)
+# GitHub Example (CODER_EXTERNAL_AUTH_0_ID="primary-github")
+# makes a GitHub authentication token available at data.coder_external_auth.github.access_token
+data "coder_external_auth" "github" {
+ id = "primary-github"
+}
- - Set the callback URL to
- `https://coder.example.com/external-auth/USER_DEFINED_ID/callback`.
- - Deactivate Webhooks.
- - Enable fine-grained access to specific repositories or a subset of
- permissions for security.
+```
- ![Register GitHub App](../images/admin/github-app-register.png)
+Inside your Terraform code, you now have access to authentication variables. Reference the documentation for your chosen provider for more information on how to supply it with a token.
-2. Adjust the GitHub App permissions. You can use more or less permissions than
- are listed here, this is merely a suggestion that allows users to clone
- repositories:
+### Workspace CLI
- ![Adjust GitHub App Permissions](../images/admin/github-app-permissions.png)
+Use [`external-auth`](../reference/cli/external-auth.md) in the Coder CLI to access a token within the workspace:
- | Name | Permission | Description |
- | ------------- | ------------ | ------------------------------------------------------ |
- | Contents | Read & Write | Grants access to code and commit statuses. |
- | Pull requests | Read & Write | Grants access to create and update pull requests. |
- | Workflows | Read & Write | Grants access to update files in `.github/workflows/`. |
- | Metadata | Read-only | Grants access to metadata written by GitHub Apps. |
- | Members | Read-only | Grants access to organization members and teams. |
+```shell
+coder external-auth access-token <USER_DEFINED_ID>
+```
-3. Install the App for your organization. You may select a subset of
- repositories to grant access to.
+## Git-provider specific env variables
- ![Install GitHub App](../images/admin/github-app-install.png)
+### Azure DevOps
+
+Azure DevOps requires the following environment variables:
```env
-CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
-CODER_EXTERNAL_AUTH_0_TYPE=github
+CODER_EXTERNAL_AUTH_0_ID="primary-azure-devops"
+CODER_EXTERNAL_AUTH_0_TYPE=azure-devops
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
+# Ensure this value is your "Client Secret", not "App Secret"
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
+CODER_EXTERNAL_AUTH_0_AUTH_URL="https://app.vssps.visualstudio.com/oauth2/authorize"
+CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://app.vssps.visualstudio.com/oauth2/token"
```
-## GitHub Enterprise
+### Azure DevOps (via Entra ID)
-GitHub Enterprise requires the following environment variables:
+Azure DevOps (via Entra ID) requires the following environment variables:
```env
-CODER_EXTERNAL_AUTH_0_ID="primary-github"
-CODER_EXTERNAL_AUTH_0_TYPE=github
+CODER_EXTERNAL_AUTH_0_ID="primary-azure-devops"
+CODER_EXTERNAL_AUTH_0_TYPE=azure-devops-entra
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
-CODER_EXTERNAL_AUTH_0_VALIDATE_URL="https://github.example.com/api/v3/user"
-CODER_EXTERNAL_AUTH_0_AUTH_URL="https://github.example.com/login/oauth/authorize"
-CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://github.example.com/login/oauth/access_token"
+CODER_EXTERNAL_AUTH_0_AUTH_URL="https://login.microsoftonline.com/<TENANT_ID>/oauth2/authorize"
```
-## Bitbucket Server
+> Note: Your app registration in Entra ID requires the `vso.code_write` scope
+
+### Bitbucket Server
Bitbucket Server requires the following environment variables:
@@ -94,35 +104,50 @@ CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxx
CODER_EXTERNAL_AUTH_0_AUTH_URL=https://bitbucket.domain.com/rest/oauth2/latest/authorize
```
-## Azure DevOps
+### Gitea
-Azure DevOps requires the following environment variables:
+```env
+CODER_EXTERNAL_AUTH_0_ID="gitea"
+CODER_EXTERNAL_AUTH_0_TYPE=gitea
+CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxxx
+CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
+# If self managed, set the Auth URL to your Gitea instance
+CODER_EXTERNAL_AUTH_0_AUTH_URL="https://gitea.com/login/oauth/authorize"
+```
+
+The Redirect URI for Gitea should be
+`https://coder.company.org/external-auth/gitea/callback`.
+
+### GitHub
+
+<blockquote class="admonition tip">
+
+If you don't require fine-grained access control, it's easier to [configure a GitHub OAuth app](#configure-a-github-oauth-app).
+
+</blockquote>
```env
-CODER_EXTERNAL_AUTH_0_ID="primary-azure-devops"
-CODER_EXTERNAL_AUTH_0_TYPE=azure-devops
+CODER_EXTERNAL_AUTH_0_ID="USER_DEFINED_ID"
+CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
-# Ensure this value is your "Client Secret", not "App Secret"
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
-CODER_EXTERNAL_AUTH_0_AUTH_URL="https://app.vssps.visualstudio.com/oauth2/authorize"
-CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://app.vssps.visualstudio.com/oauth2/token"
```
-## Azure DevOps (via Entra ID)
+### GitHub Enterprise
-Azure DevOps (via Entra ID) requires the following environment variables:
+GitHub Enterprise requires the following environment variables:
```env
-CODER_EXTERNAL_AUTH_0_ID="primary-azure-devops"
-CODER_EXTERNAL_AUTH_0_TYPE=azure-devops-entra
+CODER_EXTERNAL_AUTH_0_ID="primary-github"
+CODER_EXTERNAL_AUTH_0_TYPE=github
CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxx
CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
-CODER_EXTERNAL_AUTH_0_AUTH_URL="https://login.microsoftonline.com/<TENANT_ID>/oauth2/authorize"
+CODER_EXTERNAL_AUTH_0_VALIDATE_URL="https://github.example.com/api/v3/user"
+CODER_EXTERNAL_AUTH_0_AUTH_URL="https://github.example.com/login/oauth/authorize"
+CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://github.example.com/login/oauth/access_token"
```
-> Note: Your app registration in Entra ID requires the `vso.code_write` scope
-
-## GitLab self-managed
+### GitLab self-managed
GitLab self-managed requires the following environment variables:
@@ -138,21 +163,11 @@ CODER_EXTERNAL_AUTH_0_TOKEN_URL="https://gitlab.company.org/oauth/token"
CODER_EXTERNAL_AUTH_0_REGEX=gitlab\.company\.org
```
-## Gitea
+### JFrog Artifactory
-```env
-CODER_EXTERNAL_AUTH_0_ID="gitea"
-CODER_EXTERNAL_AUTH_0_TYPE=gitea
-CODER_EXTERNAL_AUTH_0_CLIENT_ID=xxxxxxx
-CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx
-# If self managed, set the Auth URL to your Gitea instance
-CODER_EXTERNAL_AUTH_0_AUTH_URL="https://gitea.com/login/oauth/authorize"
-```
+Visit the [JFrog Artifactory](../admin/integrations/jfrog-artifactory.md) guide for instructions on setting up JFrog Artifactory.
-The Redirect URI for Gitea should be
-https://coder.company.org/external-auth/gitea/callback
-
-## Self-managed git providers
+## Self-managed Git providers
Custom authentication and token URLs should be used for self-managed Git
provider deployments.
@@ -166,11 +181,6 @@ CODER_EXTERNAL_AUTH_0_REGEX=github\.company\.org
> Note: The `REGEX` variable must be set if using a custom git domain.
-## JFrog Artifactory
-
-See [this](../admin/integrations/jfrog-artifactory.md) guide on instructions on
-how to set up for JFrog Artifactory.
-
## Custom scopes
Optionally, you can request custom scopes:
@@ -179,6 +189,39 @@ Optionally, you can request custom scopes:
CODER_EXTERNAL_AUTH_0_SCOPES="repo:read repo:write write:gpg_key"
```
+## OAuth provider
+
+### Configure a GitHub OAuth app
+
+1. [Create a GitHub App](https://docs.github.com/en/apps/creating-github-apps/registering-a-github-app/registering-a-github-app)
+
+ - Set the callback URL to
+ `https://coder.example.com/external-auth/USER_DEFINED_ID/callback`.
+ - Deactivate Webhooks.
+ - Enable fine-grained access to specific repositories or a subset of
+ permissions for security.
+
+ ![Register GitHub App](../images/admin/github-app-register.png)
+
+1. Adjust the GitHub app permissions. You can use more or fewer permissions than
+   are listed here; this example allows users to clone
+ repositories:
+
+ ![Adjust GitHub App Permissions](../images/admin/github-app-permissions.png)
+
+ | Name | Permission | Description |
+ |---------------|--------------|--------------------------------------------------------|
+ | Contents | Read & Write | Grants access to code and commit statuses. |
+ | Pull requests | Read & Write | Grants access to create and update pull requests. |
+ | Workflows | Read & Write | Grants access to update files in `.github/workflows/`. |
+ | Metadata | Read-only | Grants access to metadata written by GitHub Apps. |
+ | Members | Read-only | Grants access to organization members and teams. |
+
+1. Install the App for your organization. You may select a subset of
+ repositories to grant access to.
+
+ ![Install GitHub App](../images/admin/github-app-install.png)
+
## Multiple External Providers
@@ -190,6 +233,16 @@ Multiple providers is an Enterprise and Premium feature.
Below is an example configuration with multiple providers:
+
+
+**Note:** To support regex matching for paths like `github\.com/org`, add the following `git config` line to the [Coder agent startup script](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/agent#startup_script):
+
+```shell
+git config --global credential.useHttpPath true
+```
+
+
+
```env
# Provider 1) github.com
CODER_EXTERNAL_AUTH_0_ID=primary-github
@@ -208,11 +261,3 @@ CODER_EXTERNAL_AUTH_1_AUTH_URL="https://github.example.com/login/oauth/authorize
CODER_EXTERNAL_AUTH_1_TOKEN_URL="https://github.example.com/login/oauth/access_token"
CODER_EXTERNAL_AUTH_1_VALIDATE_URL="https://github.example.com/api/v3/user"
```
-
-To support regex matching for paths (e.g. `github\.com/org`), you'll need to add
-this to the
-[Coder agent startup script](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/agent#startup_script):
-
-```shell
-git config --global credential.useHttpPath true
-```
diff --git a/docs/admin/index.md b/docs/admin/index.md
index a1552bcf22b9c..7dcdbc3ce91df 100644
--- a/docs/admin/index.md
+++ b/docs/admin/index.md
@@ -17,7 +17,7 @@ For any information not strictly contained in these sections, check out our
## What is an image, template, dev container, or workspace
-**Image**
+### Image
- A [base image](./templates/managing-templates/image-management.md) contains
OS-level packages and utilities that the Coder workspace is built on. It can
@@ -26,28 +26,28 @@ For any information not strictly contained in these sections, check out our
defined in each template.
- Managed by: Externally to Coder.
-**Template**
+### Template
- [Templates](./templates/index.md) include infrastructure-level dependencies
for the workspace. For example, a template can include Kubernetes
PersistentVolumeClaims, Docker containers, or EC2 VMs.
- Managed by: Template administrators from within the Coder deployment.
-**Startup scripts**
+### Startup scripts
- Agent startup scripts apply to all users of a template. This is an
intentionally flexible area that template authors have at their disposal to
manage the "last mile" of workspace creation.
- Managed by: Coder template administrators.
-**Workspace**
+### Workspace
- A [workspace](../user-guides/workspace-management.md) is the environment that
a developer works in. Developers on a team each work from their own workspace
and can use [multiple IDEs](../user-guides/workspace-access/index.md).
- Managed by: Developers
-**Development containers (dev containers)**
+### Development containers (dev containers)
- A
[Development Container](./templates/managing-templates/devcontainers/index.md)
@@ -57,7 +57,7 @@ For any information not strictly contained in these sections, check out our
will be built on-demand.
- Managed by: Dev Teams
-**Dotfiles / personalization**
+### Dotfiles / personalization
- Users may have their own specific preferences relating to shell prompt, custom
keybindings, color schemes, and more. Users can leverage Coder's
diff --git a/docs/admin/infrastructure/scale-testing.md b/docs/admin/infrastructure/scale-testing.md
index 09d6fdc837a91..37a79f5d6f742 100644
--- a/docs/admin/infrastructure/scale-testing.md
+++ b/docs/admin/infrastructure/scale-testing.md
@@ -40,12 +40,12 @@ Our scale tests include the following stages:
The scale tests runner can distribute the workload to overlap single scenarios
based on the workflow configuration:
-| | T0 | T1 | T2 | T3 | T4 | T5 | T6 |
-| -------------------- | --- | --- | --- | --- | --- | --- | --- |
-| SSH connections | X | X | X | X | | | |
-| Web Terminal (PTY) | | X | X | X | X | | |
-| Workspace apps | | | X | X | X | X | |
-| Dashboard (headless) | | | | X | X | X | X |
+| | T0 | T1 | T2 | T3 | T4 | T5 | T6 |
+|----------------------|----|----|----|----|----|----|----|
+| SSH connections | X | X | X | X | | | |
+| Web Terminal (PTY) | | X | X | X | X | | |
+| Workspace apps | | | X | X | X | X | |
+| Dashboard (headless) | | | | X | X | X | X |
This pattern closely reflects how our customers naturally use the system. SSH
connections are heavily utilized because they're the primary communication
@@ -137,7 +137,7 @@ When determining scaling requirements, consider the following factors:
connections: For a very high number of proxied connections, more memory is
required.
-**HTTP API latency**
+#### HTTP API latency
For a reliable Coder deployment dealing with medium to high loads, it's
important that API calls for workspace/template queries and workspace build
@@ -152,7 +152,7 @@ between users and the load balancer. Fortunately, the latency can be improved
with a deployment of Coder
[workspace proxies](../networking/workspace-proxies.md).
-**Node Autoscaling**
+#### Node Autoscaling
We recommend disabling the autoscaling for `coderd` nodes. Autoscaling can cause
interruptions for user connections, see
@@ -186,7 +186,7 @@ When determining scaling requirements, consider the following factors:
provisioners are free/available, the more concurrent workspace builds can be
performed.
-**Node Autoscaling**
+#### Node Autoscaling
Autoscaling provisioners is not an easy problem to solve unless it can be
predicted when a number of concurrent workspace builds increases.
@@ -219,7 +219,7 @@ When determining scaling requirements, consider the following factors:
running Coder agent and occasional CPU and memory bursts for building
projects.
-**Node Autoscaling**
+#### Node Autoscaling
Workspace nodes can be set to operate in autoscaling mode to mitigate the risk
of prolonged high resource utilization.
diff --git a/docs/admin/infrastructure/scale-utility.md b/docs/admin/infrastructure/scale-utility.md
index d5835f0b27706..b3094c49fbca4 100644
--- a/docs/admin/infrastructure/scale-utility.md
+++ b/docs/admin/infrastructure/scale-utility.md
@@ -17,7 +17,7 @@ Learn more about [Coder’s architecture](./architecture.md) and our
> hardware sizing recommendations.
| Environment | Coder CPU | Coder RAM | Coder Replicas | Database | Users | Concurrent builds | Concurrent connections (Terminal/SSH) | Coder Version | Last tested |
-| ---------------- | --------- | --------- | -------------- | ----------------- | ----- | ----------------- | ------------------------------------- | ------------- | ------------ |
+|------------------|-----------|-----------|----------------|-------------------|-------|-------------------|---------------------------------------|---------------|--------------|
| Kubernetes (GKE) | 3 cores | 12 GB | 1 | db-f1-micro | 200 | 3 | 200 simulated | `v0.24.1` | Jun 26, 2023 |
| Kubernetes (GKE) | 4 cores | 8 GB | 1 | db-custom-1-3840 | 1500 | 20 | 1,500 simulated | `v0.24.1` | Jun 27, 2023 |
| Kubernetes (GKE) | 2 cores | 4 GB | 1 | db-custom-1-3840 | 500 | 20 | 500 simulated | `v0.27.2` | Jul 27, 2023 |
@@ -48,14 +48,14 @@ specified template and extra parameters.
```shell
coder exp scaletest create-workspaces \
- --retry 5 \
- --count "${SCALETEST_PARAM_NUM_WORKSPACES}" \
- --template "${SCALETEST_PARAM_TEMPLATE}" \
- --concurrency "${SCALETEST_PARAM_CREATE_CONCURRENCY}" \
- --timeout 5h \
- --job-timeout 5h \
- --no-cleanup \
- --output json:"${SCALETEST_RESULTS_DIR}/create-workspaces.json"
+ --retry 5 \
+ --count "${SCALETEST_PARAM_NUM_WORKSPACES}" \
+ --template "${SCALETEST_PARAM_TEMPLATE}" \
+ --concurrency "${SCALETEST_PARAM_CREATE_CONCURRENCY}" \
+ --timeout 5h \
+ --job-timeout 5h \
+ --no-cleanup \
+ --output json:"${SCALETEST_RESULTS_DIR}/create-workspaces.json"
# Run `coder exp scaletest create-workspaces --help` for all usage
```
@@ -79,14 +79,14 @@ Terminal against those workspaces.
```shell
# Produce load at about 1000MB/s (25MB/40ms).
coder exp scaletest workspace-traffic \
- --template "${SCALETEST_PARAM_GREEDY_AGENT_TEMPLATE}" \
- --bytes-per-tick $((1024 * 1024 * 25)) \
- --tick-interval 40ms \
- --timeout "$((delay))s" \
- --job-timeout "$((delay))s" \
- --scaletest-prometheus-address 0.0.0.0:21113 \
- --target-workspaces "0:100" \
- --trace=false \
+ --template "${SCALETEST_PARAM_GREEDY_AGENT_TEMPLATE}" \
+ --bytes-per-tick $((1024 * 1024 * 25)) \
+ --tick-interval 40ms \
+ --timeout "$((delay))s" \
+ --job-timeout "$((delay))s" \
+ --scaletest-prometheus-address 0.0.0.0:21113 \
+ --target-workspaces "0:100" \
+ --trace=false \
--output json:"${SCALETEST_RESULTS_DIR}/traffic-${type}-greedy-agent.json"
```
@@ -114,8 +114,8 @@ wish to clean up all workspaces, you can run the following command:
```shell
coder exp scaletest cleanup \
- --cleanup-job-timeout 2h \
- --cleanup-timeout 15min
+ --cleanup-job-timeout 2h \
+ --cleanup-timeout 15min
```
This will delete all workspaces and users with the prefix `scaletest-`.
@@ -168,7 +168,7 @@ that operators can deploy depending on the traffic projections.
There are a few cluster options available:
| Workspace size | vCPU | Memory | Persisted storage | Details |
-| -------------- | ---- | ------ | ----------------- | ----------------------------------------------------- |
+|----------------|------|--------|-------------------|-------------------------------------------------------|
| minimal | 1 | 2 Gi | None | |
| small | 1 | 1 Gi | None | |
| medium | 2 | 2 Gi | None | Medium-sized cluster offers the greedy agent variant. |
diff --git a/docs/admin/infrastructure/validated-architectures/1k-users.md b/docs/admin/infrastructure/validated-architectures/1k-users.md
index 158eb10392e79..7828a3d339de8 100644
--- a/docs/admin/infrastructure/validated-architectures/1k-users.md
+++ b/docs/admin/infrastructure/validated-architectures/1k-users.md
@@ -13,7 +13,7 @@ tech startups, educational units, or small to mid-sized enterprises.
### Coderd nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | ------------------- | ------------------- | --------------- | ---------- | ----------------- |
+|-------------|---------------------|---------------------|-----------------|------------|-------------------|
| Up to 1,000 | 2 vCPU, 8 GB memory | 1-2 / 1 coderd each | `n1-standard-2` | `t3.large` | `Standard_D2s_v3` |
**Footnotes**:
@@ -24,7 +24,7 @@ tech startups, educational units, or small to mid-sized enterprises.
### Provisioner nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ------------------------------ | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|--------------------------------|------------------|--------------|-------------------|
| Up to 1,000 | 8 vCPU, 32 GB memory | 2 nodes / 30 provisioners each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -34,7 +34,7 @@ tech startups, educational units, or small to mid-sized enterprises.
### Workspace nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ----------------------- | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|-------------------------|------------------|--------------|-------------------|
| Up to 1,000 | 8 vCPU, 32 GB memory | 64 / 16 workspaces each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -47,5 +47,5 @@ tech startups, educational units, or small to mid-sized enterprises.
### Database nodes
| Users | Node capacity | Replicas | Storage | GCP | AWS | Azure |
-| ----------- | ------------------- | -------- | ------- | ------------------ | ------------- | ----------------- |
+|-------------|---------------------|----------|---------|--------------------|---------------|-------------------|
| Up to 1,000 | 2 vCPU, 8 GB memory | 1 | 512 GB | `db-custom-2-7680` | `db.t3.large` | `Standard_D2s_v3` |
diff --git a/docs/admin/infrastructure/validated-architectures/2k-users.md b/docs/admin/infrastructure/validated-architectures/2k-users.md
index 04ff5bf4ec19a..8c367c52dd914 100644
--- a/docs/admin/infrastructure/validated-architectures/2k-users.md
+++ b/docs/admin/infrastructure/validated-architectures/2k-users.md
@@ -18,13 +18,13 @@ deployment reliability under load.
### Coderd nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ----------------------- | --------------- | ----------- | ----------------- |
+|-------------|----------------------|-------------------------|-----------------|-------------|-------------------|
| Up to 2,000 | 4 vCPU, 16 GB memory | 2 nodes / 1 coderd each | `n1-standard-4` | `t3.xlarge` | `Standard_D4s_v3` |
### Provisioner nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ------------------------------ | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|--------------------------------|------------------|--------------|-------------------|
| Up to 2,000 | 8 vCPU, 32 GB memory | 4 nodes / 30 provisioners each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -37,7 +37,7 @@ deployment reliability under load.
### Workspace nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ------------------------ | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|--------------------------|------------------|--------------|-------------------|
| Up to 2,000 | 8 vCPU, 32 GB memory | 128 / 16 workspaces each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -50,7 +50,7 @@ deployment reliability under load.
### Database nodes
| Users | Node capacity | Replicas | Storage | GCP | AWS | Azure |
-| ----------- | -------------------- | -------- | ------- | ------------------- | -------------- | ----------------- |
+|-------------|----------------------|----------|---------|---------------------|----------------|-------------------|
| Up to 2,000 | 4 vCPU, 16 GB memory | 1 | 1 TB | `db-custom-4-15360` | `db.t3.xlarge` | `Standard_D4s_v3` |
**Footnotes**:
diff --git a/docs/admin/infrastructure/validated-architectures/3k-users.md b/docs/admin/infrastructure/validated-architectures/3k-users.md
index 093ec21c5c52c..3d251427cad75 100644
--- a/docs/admin/infrastructure/validated-architectures/3k-users.md
+++ b/docs/admin/infrastructure/validated-architectures/3k-users.md
@@ -19,13 +19,13 @@ continuously improve the reliability and performance of the platform.
### Coderd nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ----------------- | --------------- | ----------- | ----------------- |
+|-------------|----------------------|-------------------|-----------------|-------------|-------------------|
| Up to 3,000 | 8 vCPU, 32 GB memory | 4 / 1 coderd each | `n1-standard-4` | `t3.xlarge` | `Standard_D4s_v3` |
### Provisioner nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ------------------------ | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|--------------------------|------------------|--------------|-------------------|
| Up to 3,000 | 8 vCPU, 32 GB memory | 8 / 30 provisioners each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -39,7 +39,7 @@ continuously improve the reliability and performance of the platform.
### Workspace nodes
| Users | Node capacity | Replicas | GCP | AWS | Azure |
-| ----------- | -------------------- | ------------------------------ | ---------------- | ------------ | ----------------- |
+|-------------|----------------------|--------------------------------|------------------|--------------|-------------------|
| Up to 3,000 | 8 vCPU, 32 GB memory | 256 nodes / 12 workspaces each | `t2d-standard-8` | `t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
@@ -53,7 +53,7 @@ continuously improve the reliability and performance of the platform.
### Database nodes
| Users | Node capacity | Replicas | Storage | GCP | AWS | Azure |
-| ----------- | -------------------- | -------- | ------- | ------------------- | --------------- | ----------------- |
+|-------------|----------------------|----------|---------|---------------------|-----------------|-------------------|
| Up to 3,000 | 8 vCPU, 32 GB memory | 2 | 1.5 TB | `db-custom-8-30720` | `db.t3.2xlarge` | `Standard_D8s_v3` |
**Footnotes**:
diff --git a/docs/admin/infrastructure/validated-architectures/index.md b/docs/admin/infrastructure/validated-architectures/index.md
index 530f7d62cb5d7..6b81291648e78 100644
--- a/docs/admin/infrastructure/validated-architectures/index.md
+++ b/docs/admin/infrastructure/validated-architectures/index.md
@@ -23,7 +23,7 @@ This guide targets the following personas. It assumes a basic understanding of
cloud/on-premise computing, containerization, and the Coder platform.
| Role | Description |
-| ------------------------- | ------------------------------------------------------------------------------ |
+|---------------------------|--------------------------------------------------------------------------------|
| Platform Engineers | Responsible for deploying, operating the Coder deployment and infrastructure |
| Enterprise Architects | Responsible for architecting Coder deployments to meet enterprise requirements |
| Managed Service Providers | Entities that deploy and run Coder software as a service for customers |
@@ -31,7 +31,7 @@ cloud/on-premise computing, containerization, and the Coder platform.
## CVA Guidance
| CVA provides: | CVA does not provide: |
-| ---------------------------------------------- | ---------------------------------------------------------------------------------------- |
+|------------------------------------------------|------------------------------------------------------------------------------------------|
| Single and multi-region K8s deployment options | Prescribing OS, or cloud vs. on-premise |
| Reference architectures for up to 3,000 users | An approval of your architecture; the CVA solely provides recommendations and guidelines |
| Best practices for building a Coder deployment | Recommendations for every possible deployment scenario |
diff --git a/docs/admin/integrations/island.md b/docs/admin/integrations/island.md
index 74cd449f4257f..d5159e9e28868 100644
--- a/docs/admin/integrations/island.md
+++ b/docs/admin/integrations/island.md
@@ -3,23 +3,22 @@
April 24, 2024
---
-[Island](https://www.island.io/) is an enterprise-grade browser, offering a
-Chromium-based experience similar to popular web browsers like Chrome and Edge.
-It includes built-in security features for corporate applications and data,
-aiming to bridge the gap between consumer-focused browsers and the security
-needs of the enterprise.
+[Island](https://www.island.io/) is an enterprise-grade browser, offering a Chromium-based experience
+similar to popular web browsers like Chrome and Edge. It includes built-in
+security features for corporate applications and data, aiming to bridge the gap
+between consumer-focused browsers and the security needs of the enterprise.
-Coder natively integrates with Island's feature set, which include data loss
-protection (DLP), application awareness, browser session recording, and single
-sign-on (SSO). This guide intends to document these feature categories and how
-they apply to your Coder deployment.
+Coder natively integrates with Island's feature set, which includes data
+loss protection (DLP), application awareness, browser session recording, and
+single sign-on (SSO). This guide intends to document these feature categories
+and how they apply to your Coder deployment.
## General Configuration
@@ -33,90 +32,85 @@ creating browser policies.
## Advanced Data Loss Protection
-Integrate Island's advanced data loss prevention (DLP) capabilities with Coder's
-cloud development environment (CDE), enabling you to control the “last mile”
-between developers’ CDE and their local devices, ensuring that sensitive IP
-remains in your centralized environment.
+Integrate Island's advanced data loss prevention (DLP) capabilities with
+Coder's cloud development environment (CDE), enabling you to control the
+"last mile" between developers' CDE and their local devices,
+ensuring that sensitive IP remains in your centralized environment.
### Block cut, copy, paste, printing, screen share
-1. [Create a Data Sandbox Profile](https://documentation.island.io/docs/create-and-configure-a-data-sandbox-profile)
+1. [Create a Data Sandbox Profile](https://documentation.island.io/docs/create-and-configure-a-data-sandbox-profile).
1. Configure the following actions to allow/block (based on your security
- requirements):
+   requirements):
-- Screenshot and Screen Share
-- Printing
-- Save Page
-- Clipboard Limitations
+ - Screenshot and Screen Share
+ - Printing
+ - Save Page
+ - Clipboard Limitations
-1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- to apply the Data Sandbox Profile
+1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) to apply the Data Sandbox Profile.
-1. Define the Coder Application group as the Destination Object
+1. Define the Coder Application group as the Destination Object.
1. Define the Data Sandbox Profile as the Action in the Last Mile Protection
- section
+ section.
### Conditionally allow copy on Coder's CLI authentication page
-1. [Create a URL Object](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- with the following configuration:
+1. [Create a URL Object](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) with the following configuration:
-- **Include**
-- **URL type**: Wildcard
-- **URL address**: `coder.example.com/cli-auth`
-- **Casing**: Insensitive
+ - **Include**
+ - **URL type**: Wildcard
+ - **URL address**: `coder.example.com/cli-auth`
+ - **Casing**: Insensitive
-1. [Create a Data Sandbox Profile](https://documentation.island.io/docs/create-and-configure-a-data-sandbox-profile)
+1. [Create a Data Sandbox Profile](https://documentation.island.io/docs/create-and-configure-a-data-sandbox-profile).
-1. Configure action to allow copy/paste
+1. Configure action to allow copy/paste.
-1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- to apply the Data Sandbox Profile
+1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) to apply the Data Sandbox Profile.
-1. Define the URL Object you created as the Destination Object
+1. Define the URL Object you created as the Destination Object.
1. Define the Data Sandbox Profile as the Action in the Last Mile Protection
- section
+ section.
### Prevent file upload/download from the browser
-1. Create a Protection Profiles for both upload/download
+1. Create Protection Profiles for both upload/download.
-- [Upload documentation](https://documentation.island.io/docs/create-and-configure-an-upload-protection-profile)
-- [Download documentation](https://documentation.island.io/v1/docs/en/create-and-configure-a-download-protection-profile)
+ - [Upload documentation](https://documentation.island.io/docs/create-and-configure-an-upload-protection-profile)
+ - [Download documentation](https://documentation.island.io/v1/docs/en/create-and-configure-a-download-protection-profile)
-1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- to apply the Protection Profiles
+1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) to apply the Protection Profiles.
-1. Define the Coder Application group as the Destination Object
+1. Define the Coder Application group as the Destination Object.
1. Define the applicable Protection Profile as the Action in the Data Protection
- section
+ section.
### Scan files for sensitive data
-1. [Create a Data Loss Prevention scanner](https://documentation.island.io/docs/create-a-data-loss-prevention-scanner)
+1. [Create a Data Loss Prevention scanner](https://documentation.island.io/docs/create-a-data-loss-prevention-scanner).
-1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- to apply the DLP Scanner
+1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) to apply the DLP Scanner.
-1. Define the Coder Application group as the Destination Object
+1. Define the Coder Application group as the Destination Object.
-1. Define the DLP Scanner as the Action in the Data Protection section
+1. Define the DLP Scanner as the Action in the Data Protection section.
## Application Awareness and Boundaries
Ensure that Coder is only accessed through the Island browser, guaranteeing that
-your browser-level DLP policies are always enforced, and developers can’t
+your browser-level DLP policies are always enforced, and developers can't
sidestep such policies simply by using another browser.
### Configure browser enforcement, conditional access policies
-1. Create a conditional access policy for your configured identity provider.
+Create a conditional access policy for your configured identity provider.
-> Note: the configured IdP must be the same for both Coder and Island
+Note that the configured IdP must be the same for both Coder and Island.
- [Azure Active Directory/Entra ID](https://documentation.island.io/docs/configure-browser-enforcement-for-island-with-azure-ad#create-and-apply-a-conditional-access-policy)
- [Okta](https://documentation.island.io/docs/configure-browser-enforcement-for-island-with-okta)
@@ -129,35 +123,34 @@ screenshots, mouse clicks, and keystrokes.
### Activity Logging Module
-1. [Create an Activity Logging Profile](https://documentation.island.io/docs/create-and-configure-an-activity-logging-profile)
+1. [Create an Activity Logging Profile](https://documentation.island.io/docs/create-and-configure-an-activity-logging-profile). Supported browser
+ events include:
-Supported browser events include:
+ - Web Navigation
+ - File Download
+ - File Upload
+ - Clipboard/Drag & Drop
+ - Print
+ - Save As
+ - Screenshots
+ - Mouse Clicks
+ - Keystrokes
-- Web Navigation
-- File Download
-- File Upload
-- Clipboard/Drag & Drop
-- Print
-- Save As
-- Screenshots
-- Mouse Clicks
-- Keystrokes
+1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general) to apply the Activity Logging Profile.
-1. [Create a Policy Rule](https://documentation.island.io/docs/create-and-configure-a-policy-rule-general)
- to apply the Activity Logging Profile
-
-1. Define the Coder Application group as the Destination Object
+1. Define the Coder Application group as the Destination Object.
1. Define the Activity Logging Profile as the Action in the Security &
- Visibility section
+ Visibility section.
## Identity-aware logins (SSO)
-Integrate Island's identity management system with Coder's authentication
-mechanisms to enable identity-aware logins.
+Integrate Island's identity management system with Coder's
+authentication mechanisms to enable identity-aware logins.
### Configure single sign-on (SSO) seamless authentication between Coder and Island
Configure the same identity provider (IdP) for both your Island and Coder
-deployment. Upon initial login to the Island browser, the user's session token
-will automatically be passed to Coder and authenticate their Coder session.
+deployment. Upon initial login to the Island browser, the user's session
+token will automatically be passed to Coder and authenticate their Coder
+session.
diff --git a/docs/admin/integrations/jfrog-artifactory.md b/docs/admin/integrations/jfrog-artifactory.md
index 89a8ac99cf52e..afc94d6158b94 100644
--- a/docs/admin/integrations/jfrog-artifactory.md
+++ b/docs/admin/integrations/jfrog-artifactory.md
@@ -3,7 +3,7 @@
January 24, 2024
@@ -31,145 +31,122 @@ by using our official Coder [modules](https://registry.coder.com). We publish
two type of modules that automate the JFrog Artifactory and Coder integration.
1. [JFrog-OAuth](https://registry.coder.com/modules/jfrog-oauth)
-2. [JFrog-Token](https://registry.coder.com/modules/jfrog-token)
+1. [JFrog-Token](https://registry.coder.com/modules/jfrog-token)
### JFrog-OAuth
This module is usable by JFrog self-hosted (on-premises) Artifactory as it
-requires configuring a custom integration. This integration benefits from
-Coder's [external-auth](https://coder.com/docs/admin/external-auth) feature and
-allows each user to authenticate with Artifactory using an OAuth flow and issues
-user-scoped tokens to each user.
+requires configuring a custom integration. This integration benefits from Coder's [external-auth](../../admin/external-auth.md) feature and allows each user to authenticate with Artifactory using an OAuth flow and issues user-scoped tokens to each user.
To set this up, follow these steps:
-1. Modify your Helm chart `values.yaml` for JFrog Artifactory to add,
-
-```yaml
-artifactory:
- enabled: true
- frontend:
- extraEnvironmentVariables:
- - name: JF_FRONTEND_FEATURETOGGLER_ACCESSINTEGRATION
- value: "true"
- access:
- accessConfig:
- integrations-enabled: true
- integration-templates:
- - id: "1"
- name: "CODER"
- redirect-uri: "https://CODER_URL/external-auth/jfrog/callback"
- scope: "applied-permissions/user"
-```
-
-> Note Replace `CODER_URL` with your Coder deployment URL, e.g.,
->
-
-2. Create a new Application Integration by going to
- and select the
+1. Add the following to your Helm chart `values.yaml` for JFrog Artifactory. Replace `CODER_URL` with your Coder deployment URL:
+
+ ```yaml
+ artifactory:
+ enabled: true
+ frontend:
+ extraEnvironmentVariables:
+ - name: JF_FRONTEND_FEATURETOGGLER_ACCESSINTEGRATION
+ value: "true"
+ access:
+ accessConfig:
+ integrations-enabled: true
+ integration-templates:
+ - id: "1"
+ name: "CODER"
+ redirect-uri: "https://CODER_URL/external-auth/jfrog/callback"
+ scope: "applied-permissions/user"
+ ```
+
+1. Create a new Application Integration by going to
+ `https://JFROG_URL/ui/admin/configuration/integrations/new` and select the
Application Type as the integration you created in step 1.
-![JFrog Platform new integration](../../images/guides/artifactory-integration/jfrog-oauth-app.png)
-
-3. Add a new
- [external authentication](https://coder.com/docs/admin/external-auth) to
- Coder by setting these env variables,
-
-```env
-# JFrog Artifactory External Auth
-CODER_EXTERNAL_AUTH_1_ID="jfrog"
-CODER_EXTERNAL_AUTH_1_TYPE="jfrog"
-CODER_EXTERNAL_AUTH_1_CLIENT_ID="YYYYYYYYYYYYYYY"
-CODER_EXTERNAL_AUTH_1_CLIENT_SECRET="XXXXXXXXXXXXXXXXXXX"
-CODER_EXTERNAL_AUTH_1_DISPLAY_NAME="JFrog Artifactory"
-CODER_EXTERNAL_AUTH_1_DISPLAY_ICON="/icon/jfrog.svg"
-CODER_EXTERNAL_AUTH_1_AUTH_URL="https://JFROG_URL/ui/authorization"
-CODER_EXTERNAL_AUTH_1_SCOPES="applied-permissions/user"
-```
-
-> Note Replace `JFROG_URL` with your JFrog Artifactory base URL, e.g.,
->
-
-4. Create or edit a Coder template and use the
- [JFrog-OAuth](https://registry.coder.com/modules/jfrog-oauth) module to
- configure the integration.
-
-```tf
-module "jfrog" {
- source = "registry.coder.com/modules/jfrog-oauth/coder"
- version = "1.0.0"
- agent_id = coder_agent.example.id
- jfrog_url = "https://jfrog.example.com"
- configure_code_server = true # this depends on the code-server
- username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
- package_managers = {
- "npm": "npm",
- "go": "go",
- "pypi": "pypi"
- }
-}
-```
+1. Add a new [external authentication](../../admin/external-auth.md) to Coder by setting these
+ environment variables in a manner consistent with your Coder deployment. Replace `JFROG_URL` with your JFrog Artifactory base URL:
+
+ ```env
+ # JFrog Artifactory External Auth
+ CODER_EXTERNAL_AUTH_1_ID="jfrog"
+ CODER_EXTERNAL_AUTH_1_TYPE="jfrog"
+ CODER_EXTERNAL_AUTH_1_CLIENT_ID="YYYYYYYYYYYYYYY"
+ CODER_EXTERNAL_AUTH_1_CLIENT_SECRET="XXXXXXXXXXXXXXXXXXX"
+ CODER_EXTERNAL_AUTH_1_DISPLAY_NAME="JFrog Artifactory"
+ CODER_EXTERNAL_AUTH_1_DISPLAY_ICON="/icon/jfrog.svg"
+ CODER_EXTERNAL_AUTH_1_AUTH_URL="https://JFROG_URL/ui/authorization"
+ CODER_EXTERNAL_AUTH_1_SCOPES="applied-permissions/user"
+ ```
+
+1. Create or edit a Coder template and use the [JFrog-OAuth](https://registry.coder.com/modules/jfrog-oauth) module to configure the integration:
+
+ ```tf
+ module "jfrog" {
+ source = "registry.coder.com/modules/jfrog-oauth/coder"
+ version = "1.0.0"
+ agent_id = coder_agent.example.id
+ jfrog_url = "https://jfrog.example.com"
+ configure_code_server = true # this depends on the code-server
+ username_field = "username" # If you are using GitHub to login to both Coder and Artifactory, use username_field = "username"
+ package_managers = {
+ "npm": "npm",
+ "go": "go",
+ "pypi": "pypi"
+ }
+ }
+ ```
### JFrog-Token
-This module makes use of the
-[Artifactory terraform provider](https://registry.terraform.io/providers/jfrog/artifactory/latest/docs)
-and an admin-scoped token to create user-scoped tokens for each user by matching
-their Coder email or username with Artifactory. This can be used for both SaaS
-and self-hosted(on-premises) Artifactory instances.
+This module makes use of the [Artifactory terraform
+provider](https://registry.terraform.io/providers/jfrog/artifactory/latest/docs) and an admin-scoped token to create
+user-scoped tokens for each user by matching their Coder email or username with
+Artifactory. This can be used for both SaaS and self-hosted (on-premises)
+Artifactory instances.
To set this up, follow these steps:
-1. Get a JFrog access token from your Artifactory instance. The token must be an
- [admin token](https://registry.terraform.io/providers/jfrog/artifactory/latest/docs#access-token)
- with scope `applied-permissions/admin`.
-2. Create or edit a Coder template and use the
- [JFrog-Token](https://registry.coder.com/modules/jfrog-token) module to
- configure the integration and pass the admin token. It is recommended to
- store the token in a sensitive terraform variable to prevent it from being
- displayed in plain text in the terraform state.
-
-```tf
-variable "artifactory_access_token" {
- type = string
- sensitive = true
-}
-
-module "jfrog" {
- source = "registry.coder.com/modules/jfrog-token/coder"
- version = "1.0.0"
- agent_id = coder_agent.example.id
- jfrog_url = "https://example.jfrog.io"
- configure_code_server = true # this depends on the code-server
- artifactory_access_token = var.artifactory_access_token
- package_managers = {
- "npm": "npm",
- "go": "go",
- "pypi": "pypi"
- }
-}
-```
-
-
-The admin-level access token is used to provision user tokens and is never exposed to
-developers or stored in workspaces.
-
-
-If you do not want to use the official modules, you can check example template
-that uses Docker as the underlying compute
-[here](https://github.com/coder/coder/tree/main/examples/jfrog/docker). The same
-concepts apply to all compute types.
+1. Get a JFrog access token from your Artifactory instance. The token must be an [admin token](https://registry.terraform.io/providers/jfrog/artifactory/latest/docs#access-token) with scope `applied-permissions/admin`.
+
+1. Create or edit a Coder template and use the [JFrog-Token](https://registry.coder.com/modules/jfrog-token) module to configure the integration and pass the admin token. It is recommended to store the token in a sensitive Terraform variable to prevent it from being displayed in plain text in the Terraform state:
+
+ ```tf
+ variable "artifactory_access_token" {
+ type = string
+ sensitive = true
+ }
+
+ module "jfrog" {
+ source = "registry.coder.com/modules/jfrog-token/coder"
+ version = "1.0.0"
+ agent_id = coder_agent.example.id
+ jfrog_url = "https://example.jfrog.io"
+ configure_code_server = true # this depends on the code-server
+ artifactory_access_token = var.artifactory_access_token
+ package_managers = {
+ "npm": "npm",
+ "go": "go",
+ "pypi": "pypi"
+ }
+ }
+ ```
+
+
+
+ The admin-level access token is used to provision user tokens and is never exposed to developers or stored in workspaces.
+
+
+
+If you don't want to use the official modules, you can read through the [example template](https://github.com/coder/coder/tree/main/examples/jfrog/docker), which uses Docker as the underlying compute. The
+same concepts apply to all compute types.
## Offline Deployments
-See the
-[offline deployments](../templates/extending-templates/modules.md#offline-installations)
-section for instructions on how to use coder-modules in an offline environment
-with Artifactory.
+See the [offline deployments](../templates/extending-templates/modules.md#offline-installations) section for instructions on how to use Coder modules in an offline environment with Artifactory.
+
+## Next Steps
-## More reading
+- See the [full example Docker template](https://github.com/coder/coder/tree/main/examples/jfrog/docker).
-- See the full example template
- [here](https://github.com/coder/coder/tree/main/examples/jfrog/docker).
- To serve extensions from your own VS Code Marketplace, check out
[code-marketplace](https://github.com/coder/code-marketplace#artifactory-storage).
diff --git a/docs/admin/integrations/jfrog-xray.md b/docs/admin/integrations/jfrog-xray.md
index 933bf2e475edd..bb1b9db106611 100644
--- a/docs/admin/integrations/jfrog-xray.md
+++ b/docs/admin/integrations/jfrog-xray.md
@@ -3,68 +3,71 @@
March 17, 2024
---
-This guide will walk you through the process of adding
-[JFrog Xray](https://jfrog.com/xray/) integration to Coder Kubernetes workspaces
-using Coder's [JFrog Xray Integration](https://github.com/coder/coder-xray).
+This guide describes the process of integrating [JFrog Xray](https://jfrog.com/xray/) into Coder Kubernetes-backed
+workspaces using Coder's [JFrog Xray Integration](https://github.com/coder/coder-xray).
## Prerequisites
- A self-hosted JFrog Platform instance.
- Kubernetes workspaces running on Coder.
-## Deploying the Coder - JFrog Xray Integration
+## Deploy the **Coder - JFrog Xray** Integration
-1. Create a JFrog Platform
- [Access Token](https://jfrog.com/help/r/jfrog-platform-administration-documentation/access-tokens)
- with a user that has the read
- [permission](https://jfrog.com/help/r/jfrog-platform-administration-documentation/permissions)
+1. Create a JFrog Platform [Access Token](https://jfrog.com/help/r/jfrog-platform-administration-documentation/access-tokens) with a user that has the `read` [permission](https://jfrog.com/help/r/jfrog-platform-administration-documentation/permissions)
for the repositories you want to scan.
-1. Create a Coder [token](../../reference/cli/tokens_create.md#tokens-create)
- with a user that has the [`owner`](../users/index.md#roles) role.
+
+1. Create a Coder [token](../../reference/cli/tokens_create.md#tokens-create) with a user that has the [`owner`](../users#roles) role.
+
1. Create Kubernetes secrets for the JFrog Xray and Coder tokens.
```bash
- kubectl create secret generic coder-token --from-literal=coder-token=''
- kubectl create secret generic jfrog-token --from-literal=user='' --from-literal=token=''
+ kubectl create secret generic coder-token \
+ --from-literal=coder-token=''
```
-1. Deploy the Coder - JFrog Xray integration.
+ ```bash
+ kubectl create secret generic jfrog-token \
+ --from-literal=user='' \
+ --from-literal=token=''
+ ```
+
+1. Deploy the **Coder - JFrog Xray** integration.
```bash
helm repo add coder-xray https://helm.coder.com/coder-xray
+ ```
+ ```bash
helm upgrade --install coder-xray coder-xray/coder-xray \
- --namespace coder-xray \
- --create-namespace \
- --set namespace="" \ # Replace with your Coder workspaces namespace
- --set coder.url="https://" \
- --set coder.secretName="coder-token" \
- --set artifactory.url="https://" \
- --set artifactory.secretName="jfrog-token"
+ --namespace coder-xray \
+ --create-namespace \
+ --set namespace="" \
+ --set coder.url="https://" \
+ --set coder.secretName="coder-token" \
+ --set artifactory.url="https://" \
+ --set artifactory.secretName="jfrog-token"
```
-### Updating the Coder template
+
+
+ To authenticate with the Artifactory registry, you may need to
+ create a [Docker config](https://jfrog.com/help/r/jfrog-artifactory-documentation/docker-advanced-topics) and use it in the
+ `imagePullSecrets` field of the Kubernetes Pod. See the [Defining ImagePullSecrets for Coder workspaces](../../tutorials/image-pull-secret.md) guide for more
+ information.
-[`coder-xray`](https://github.com/coder/coder-xray) will scan all kubernetes
-workspaces in the specified namespace. It depends on the `image` available in
-Artifactory and indexed by Xray. To ensure that the images are available in
-Artifactory, update the Coder template to use the Artifactory registry.
+
-```tf
-image = "//:"
-```
+## Validate your installation
-> **Note**: To authenticate with the Artifactory registry, you may need to
-> create a
-> [Docker config](https://jfrog.com/help/r/jfrog-artifactory-documentation/docker-advanced-topics)
-> and use it in the `imagePullSecrets` field of the kubernetes pod. See this
-> [guide](../../tutorials/image-pull-secret.md) for more information.
+Once installed, configured workspaces will now have a banner appear on any
+workspace with vulnerabilities reported by JFrog Xray.
-![JFrog Xray Integration](../../images/guides/xray-integration/example.png)
+
diff --git a/docs/admin/integrations/prometheus.md b/docs/admin/integrations/prometheus.md
index 059e19da126cc..d849f192aaa3d 100644
--- a/docs/admin/integrations/prometheus.md
+++ b/docs/admin/integrations/prometheus.md
@@ -3,9 +3,8 @@
Coder exposes many metrics which can be consumed by a Prometheus server, and
give insight into the current state of a live Coder deployment.
-If you don't have an Prometheus server installed, you can follow the Prometheus
-[Getting started](https://prometheus.io/docs/prometheus/latest/getting_started/)
-guide.
+If you don't have a Prometheus server installed, you can follow the Prometheus
+[Getting started](https://prometheus.io/docs/prometheus/latest/getting_started/) guide.
## Enable Prometheus metrics
@@ -19,7 +18,7 @@ use either the environment variable `CODER_PROMETHEUS_ADDRESS` or the flag
address.
If `coder server --prometheus-enable` is started locally, you can preview the
-metrics endpoint in your browser or by using curl:
+metrics endpoint in your browser or with `curl`:
```console
$ curl http://localhost:2112/
@@ -31,13 +30,12 @@ coderd_api_active_users_duration_hour 0
### Kubernetes deployment
-The Prometheus endpoint can be enabled in the
-[Helm chart's](https://github.com/coder/coder/tree/main/helm) `values.yml` by
-setting the environment variable `CODER_PROMETHEUS_ADDRESS` to `0.0.0.0:2112`.
-The environment variable `CODER_PROMETHEUS_ENABLE` will be enabled
-automatically. A Service Endpoint will not be exposed; if you need to expose the
-Prometheus port on a Service, (for example, to use a `ServiceMonitor`), create a
-separate headless service instead:
+The Prometheus endpoint can be enabled in the [Helm chart's](https://github.com/coder/coder/tree/main/helm)
+`values.yml` by setting the environment variable `CODER_PROMETHEUS_ADDRESS` to
+`0.0.0.0:2112`. The environment variable `CODER_PROMETHEUS_ENABLE` will be
+enabled automatically. A Service Endpoint will not be exposed; if you need to
+expose the Prometheus port on a Service (for example, to use a
+`ServiceMonitor`), create a separate headless service instead.
```yaml
apiVersion: v1
@@ -61,22 +59,23 @@ spec:
### Prometheus configuration
To allow Prometheus to scrape the Coder metrics, you will need to create a
-`scape_config` in your `prometheus.yml` file, or in the Prometheus Helm chart
-values. Below is an example `scrape_config`:
+`scrape_config` in your `prometheus.yml` file, or in the Prometheus Helm chart
+values. The following is an example `scrape_config`.
```yaml
scrape_configs:
- job_name: "coder"
scheme: "http"
static_configs:
- - targets: [":2112"] # replace with the the IP address of the Coder pod or server
+      # replace with the IP address of the Coder pod or server
+ - targets: [":2112"]
labels:
apps: "coder"
```
To use the Kubernetes Prometheus operator to scrape metrics, you will need to
-create a `ServiceMonitor` in your Coder deployment namespace. Below is an
-example `ServiceMonitor`:
+create a `ServiceMonitor` in your Coder deployment namespace. The following is
+an example `ServiceMonitor`.
```yaml
apiVersion: monitoring.coreos.com/v1
@@ -96,7 +95,7 @@ spec:
## Available metrics
-`coderd_agentstats_*` metrics must first be enabled with the flag
+You must first enable `coderd_agentstats_*` metrics with the flag
`--prometheus-collect-agent-stats`, or the environment variable
`CODER_PROMETHEUS_COLLECT_AGENT_STATS` before they can be retrieved from the
deployment. They will always be available from the agent.
@@ -104,7 +103,7 @@ deployment. They will always be available from the agent.
| Name | Type | Description | Labels |
-| ------------------------------------------------------------- | --------- | -------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ |
+|---------------------------------------------------------------|-----------|----------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------|
| `agent_scripts_executed_total` | counter | Total number of scripts executed by the Coder agent. Includes cron scheduled scripts. | `agent_name` `success` `template_name` `username` `workspace_name` |
| `coderd_agents_apps` | gauge | Agent applications with statuses. | `agent_name` `app_name` `health` `username` `workspace_name` |
| `coderd_agents_connection_latencies_seconds` | gauge | Agent connection latencies in seconds. | `agent_name` `derp_region` `preferred` `username` `workspace_name` |
diff --git a/docs/admin/integrations/vault.md b/docs/admin/integrations/vault.md
index 4a75008f221cd..4894a7ebda0a1 100644
--- a/docs/admin/integrations/vault.md
+++ b/docs/admin/integrations/vault.md
@@ -3,29 +3,27 @@
August 05, 2024
---
-This guide will walk you through the process of adding
-[HashiCorp Vault](https://www.vaultproject.io/) integration to Coder workspaces.
+This guide describes the process of integrating [HashiCorp Vault](https://www.vaultproject.io/) into Coder workspaces.
Coder makes it easy to integrate HashiCorp Vault with your workspaces by
-providing official terraform modules to integrate Vault with Coder. This guide
+providing official Terraform modules to integrate Vault with Coder. This guide
will show you how to use these modules to integrate HashiCorp Vault with Coder.
-## `vault-github`
+## The `vault-github` module
-[`vault-github`](https://registry.coder.com/modules/vault-github) is a terraform
-module that allows you to authenticate with Vault using a GitHub token. This
-modules uses the existing GitHub [external authentication](../external-auth.md)
-to get the token and authenticate with Vault.
+The [`vault-github`](https://registry.coder.com/modules/vault-github) module is a Terraform module that allows you to
+authenticate with Vault using a GitHub token. This module uses the existing
+GitHub [external authentication](../external-auth.md) to get the token and authenticate with Vault.
-To use this module, you need to add the following code to your terraform
-configuration:
+To use this module, add the following code to your Terraform configuration.
```tf
module "vault" {
@@ -37,11 +35,10 @@ module "vault" {
}
```
-This module will install and authenticate the `vault` CLI in your Coder
-workspace.
+This module installs and authenticates the `vault` CLI in your Coder workspace.
-Users then can use the `vault` CLI to interact with the vault, e.g., to het a kv
-secret,
+Users then can use the `vault` CLI to interact with Vault; for example, to fetch
+a secret stored in the KV backend.
```shell
vault kv get -namespace=YOUR_NAMESPACE -mount=MOUNT_NAME SECRET_NAME
diff --git a/docs/admin/monitoring/health-check.md b/docs/admin/monitoring/health-check.md
index 51c0e8082afff..0a5c135c6d50f 100644
--- a/docs/admin/monitoring/health-check.md
+++ b/docs/admin/monitoring/health-check.md
@@ -24,7 +24,7 @@ If there is an issue, you may see one of the following errors reported:
### EACS01
-_Access URL not set_
+#### Access URL not set
**Problem:** no access URL has been configured.
@@ -32,7 +32,7 @@ _Access URL not set_
### EACS02
-_Access URL invalid_
+#### Access URL invalid
**Problem:** `${CODER_ACCESS_URL}/healthz` is not a valid URL.
@@ -44,7 +44,7 @@ _Access URL invalid_
### EACS03
-_Failed to fetch `/healthz`_
+#### Failed to fetch `/healthz`
**Problem:** Coder was unable to execute a GET request to
`${CODER_ACCESS_URL}/healthz`.
@@ -74,7 +74,7 @@ The output of this command should aid further diagnosis.
### EACS04
-_/healthz did not return 200 OK_
+#### /healthz did not return 200 OK
**Problem:** Coder was able to execute a GET request to
`${CODER_ACCESS_URL}/healthz`, but the response code was not `200 OK` as
@@ -97,7 +97,7 @@ its configured database, and also measures the median latency over 5 attempts.
### EDB01
-_Database Ping Failed_
+#### Database Ping Failed
**Problem:** This error code is returned if any attempt to execute this database
query fails.
@@ -106,7 +106,7 @@ query fails.
### EDB02
-_Database Latency High_
+#### Database Latency High
**Problem:** This code is returned if the median latency is higher than the
[configured threshold](../../reference/cli/server.md#--health-check-threshold-database).
@@ -117,14 +117,15 @@ Coder's current activity and usage. It may be necessary to increase the
resources allocated to Coder's database. Alternatively, you can raise the
configured threshold to a higher value (this will not address the root cause).
-> [!TIP]
->
-> - You can enable
-> [detailed database metrics](../../reference/cli/server.md#--prometheus-collect-db-metrics)
-> in Coder's Prometheus endpoint.
-> - If you have [tracing enabled](../../reference/cli/server.md#--trace), these
-> traces may also contain useful information regarding Coder's database
-> activity.
+
+
+You can enable
+[detailed database metrics](../../reference/cli/server.md#--prometheus-collect-db-metrics)
+in Coder's Prometheus endpoint. If you have
+[tracing enabled](../../reference/cli/server.md#--trace), these traces may also
+contain useful information regarding Coder's database activity.
+
+
## DERP
@@ -138,7 +139,7 @@ following:
### EDERP01
-_DERP Node Uses Websocket_
+#### DERP Node Uses Websocket
**Problem:** When Coder attempts to establish a connection to one or more DERP
servers, it sends a specific `Upgrade: derp` HTTP header. Some load balancers
@@ -149,15 +150,19 @@ This is not necessarily a fatal error, but a possible indication of a
misconfigured reverse HTTP proxy. Additionally, while workspace users should
still be able to reach their workspaces, connection performance may be degraded.
-> **Note:** This may also be shown if you have
-> [forced websocket connections for DERP](../../reference/cli/server.md#--derp-force-websockets).
+
+
+**Note:** This may also be shown if you have
+[forced websocket connections for DERP](../../reference/cli/server.md#--derp-force-websockets).
+
+
**Solution:** ensure that any proxies you use allow connection upgrade with the
`Upgrade: derp` header.
### EDERP02
-_One or more DERP nodes are unhealthy_
+#### One or more DERP nodes are unhealthy
**Problem:** This is shown if Coder is unable to reach one or more configured
DERP servers. Clients will fall back to use the remaining DERP servers, but
@@ -176,7 +181,7 @@ curl -v "https://coder.company.com/derp"
### ESTUN01
-_No STUN servers available._
+#### No STUN servers available
**Problem:** This is shown if no STUN servers are available. Coder will use STUN
to establish [direct connections](../networking/stun.md). Without at least one
@@ -189,7 +194,7 @@ configured port.
### ESTUN02
-_STUN returned different addresses; you may be behind a hard NAT._
+#### STUN returned different addresses; you may be behind a hard NAT
**Problem:** This is a warning shown when multiple attempts to determine our
public IP address/port via STUN resulted in different `ip:port` combinations.
@@ -218,7 +223,7 @@ message over the connection, and attempt to read back that same message.
### EWS01
-_Failed to establish a WebSocket connection_
+#### Failed to establish a WebSocket connection
**Problem:** Coder was unable to establish a WebSocket connection over its own
Access URL.
@@ -237,7 +242,7 @@ Access URL.
### EWS02
-_Failed to echo a WebSocket message_
+#### Failed to echo a WebSocket message
**Problem:** Coder was able to establish a WebSocket connection, but was unable
to write a message.
@@ -258,7 +263,7 @@ Coder will periodically query their availability and show their status here.
### EWP01
-_Error Updating Workspace Proxy Health_
+#### Error Updating Workspace Proxy Health
**Problem:** Coder was unable to query the connected workspace proxies for their
health status.
@@ -268,7 +273,7 @@ connectivity issue.
### EWP02
-_Error Fetching Workspace Proxies_
+#### Error Fetching Workspace Proxies
**Problem:** Coder was unable to fetch the stored workspace proxy health data
from the database.
@@ -278,7 +283,7 @@ issue with Coder's configured database.
### EWP04
-_One or more Workspace Proxies Unhealthy_
+#### One or more Workspace Proxies Unhealthy
**Problem:** One or more workspace proxies are not reachable.
@@ -287,7 +292,7 @@ workspace proxies.
### EPD01
-_No Provisioner Daemons Available_
+#### No Provisioner Daemons Available
**Problem:** No provisioner daemons are registered with Coder. No workspaces can
be built until there is at least one provisioner daemon running.
@@ -300,12 +305,16 @@ that they are able to successfully connect to Coder. Otherwise, ensure
[`--provisioner-daemons`](../../reference/cli/server.md#--provisioner-daemons)
is set to a value greater than 0.
-> Note: This may be a transient issue if you are currently in the process of
-> updating your deployment.
+
+
+**Note:** This may be a transient issue if you are currently in the process of
+updating your deployment.
+
+
### EPD02
-_Provisioner Daemon Version Mismatch_
+#### Provisioner Daemon Version Mismatch
**Problem:** One or more provisioner daemons are more than one major or minor
version out of date with the main deployment. It is important that provisioner
@@ -315,12 +324,16 @@ of API incompatibility.
**Solution:** Update the provisioner daemon to match the currently running
version of Coder.
-> Note: This may be a transient issue if you are currently in the process of
-> updating your deployment.
+
+
+**Note:** This may be a transient issue if you are currently in the process of
+updating your deployment.
+
+
### EPD03
-_Provisioner Daemon API Version Mismatch_
+#### Provisioner Daemon API Version Mismatch
**Problem:** One or more provisioner daemons are using APIs that are marked as
deprecated. These deprecated APIs may be removed in a future release of Coder,
@@ -330,12 +343,16 @@ connect to Coder.
**Solution:** Update the provisioner daemon to match the currently running
version of Coder.
-> Note: This may be a transient issue if you are currently in the process of
-> updating your deployment.
+
+
+**Note:** This may be a transient issue if you are currently in the process of
+updating your deployment.
+
+
-## EUNKNOWN
+### EUNKNOWN
-_Unknown Error_
+#### Unknown Error
**Problem:** This error is shown when an unexpected error occurred evaluating
deployment health. It may resolve on its own.
diff --git a/docs/admin/monitoring/metrics.md b/docs/admin/monitoring/metrics.md
index 167aa2237159b..5a30076f1db57 100644
--- a/docs/admin/monitoring/metrics.md
+++ b/docs/admin/monitoring/metrics.md
@@ -8,7 +8,7 @@ If you don't have an Prometheus server installed, you can follow the Prometheus
[Getting started](https://prometheus.io/docs/prometheus/latest/getting_started/)
guide.
-### Setting up metrics
+## Setting up metrics
To set up metrics monitoring, please read our
[Prometheus integration guide](../integrations/prometheus.md). The following
diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md
index f527d7743ff87..aa54ad9c143dc 100644
--- a/docs/admin/monitoring/notifications/index.md
+++ b/docs/admin/monitoring/notifications/index.md
@@ -64,7 +64,7 @@ You can modify the notification delivery behavior using the following server
flags.
| Required | CLI | Env | Type | Description | Default |
-| :------: | ----------------------------------- | --------------------------------------- | ---------- | --------------------------------------------------------------------------------------------------------------------- | ------- |
+|:--------:|-------------------------------------|-----------------------------------------|------------|-----------------------------------------------------------------------------------------------------------------------|---------|
| ✔️ | `--notifications-dispatch-timeout` | `CODER_NOTIFICATIONS_DISPATCH_TIMEOUT` | `duration` | How long to wait while a notification is being sent before giving up. | 1m |
| ✔️ | `--notifications-method` | `CODER_NOTIFICATIONS_METHOD` | `string` | Which delivery method to use (available options: 'smtp', 'webhook'). See [Delivery Methods](#delivery-methods) below. | smtp |
| -️ | `--notifications-max-send-attempts` | `CODER_NOTIFICATIONS_MAX_SEND_ATTEMPTS` | `int` | The upper limit of attempts to send a notification. | 5 |
@@ -90,15 +90,15 @@ existing one.
**Server Settings:**
| Required | CLI | Env | Type | Description | Default |
-| :------: | ------------------- | ----------------------- | -------- | ----------------------------------------- | --------- |
+|:--------:|---------------------|-------------------------|----------|-------------------------------------------|-----------|
| ✔️ | `--email-from` | `CODER_EMAIL_FROM` | `string` | The sender's address to use. | |
-| ✔️ | `--email-smarthost` | `CODER_EMAIL_SMARTHOST` | `string` | The SMTP relay to send messages |
+| ✔️ | `--email-smarthost` | `CODER_EMAIL_SMARTHOST` | `string` | The SMTP relay to send messages. | |
| ✔️ | `--email-hello` | `CODER_EMAIL_HELLO` | `string` | The hostname identifying the SMTP server. | localhost |
**Authentication Settings:**
| Required | CLI | Env | Type | Description |
-| :------: | ---------------------------- | -------------------------------- | -------- | ------------------------------------------------------------------------- |
+|:--------:|------------------------------|----------------------------------|----------|---------------------------------------------------------------------------|
| - | `--email-auth-username` | `CODER_EMAIL_AUTH_USERNAME` | `string` | Username to use with PLAIN/LOGIN authentication. |
| - | `--email-auth-password` | `CODER_EMAIL_AUTH_PASSWORD` | `string` | Password to use with PLAIN/LOGIN authentication. |
| - | `--email-auth-password-file` | `CODER_EMAIL_AUTH_PASSWORD_FILE` | `string` | File from which to load password for use with PLAIN/LOGIN authentication. |
@@ -106,14 +106,14 @@ existing one.
**TLS Settings:**
-| Required | CLI | Env | Type | Description | Default |
-| :------: | --------------------------- | ----------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- |
-| - | `--email-force-tls` | `CODER_EMAIL_FORCE_TLS` | `bool` | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. See https://datatracker.ietf.org/doc/html/rfc8314#section-3.3. | false |
-| - | `--email-tls-starttls` | `CODER_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false |
-| - | `--email-tls-skip-verify` | `CODER_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false |
-| - | `--email-tls-server-name` | `CODER_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | |
-| - | `--email-tls-cert-file` | `CODER_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. | |
-| - | `--email-tls-cert-key-file` | `CODER_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | |
+| Required | CLI | Env | Type | Description | Default |
+|:--------:|-----------------------------|-------------------------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
+| -        | `--email-force-tls`         | `CODER_EMAIL_FORCE_TLS`       | `bool`   | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. See <https://datatracker.ietf.org/doc/html/rfc8314#section-3.3>. | false   |
+| - | `--email-tls-starttls` | `CODER_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false |
+| - | `--email-tls-skip-verify` | `CODER_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false |
+| - | `--email-tls-server-name` | `CODER_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | |
+| - | `--email-tls-cert-file` | `CODER_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. | |
+| - | `--email-tls-cert-key-file` | `CODER_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | |
**NOTE:** you _MUST_ use `CODER_EMAIL_FORCE_TLS` if your smarthost supports TLS
on a port other than `465`.
@@ -123,9 +123,11 @@ on a port other than `465`.
After setting the required fields above:
1. Create an [App Password](https://myaccount.google.com/apppasswords) using the
- account you wish to send from
-2. Set the following configuration options:
- ```
+ account you wish to send from.
+
+1. Set the following configuration options:
+
+ ```text
CODER_EMAIL_SMARTHOST=smtp.gmail.com:465
   CODER_EMAIL_AUTH_USERNAME=<user>@<domain>
   CODER_EMAIL_AUTH_PASSWORD="<app password>"
@@ -140,8 +142,9 @@ for more options.
After setting the required fields above:
1. Setup an account on Microsoft 365 or outlook.com
-2. Set the following configuration options:
- ```
+1. Set the following configuration options:
+
+ ```text
CODER_EMAIL_SMARTHOST=smtp-mail.outlook.com:587
CODER_EMAIL_TLS_STARTTLS=true
   CODER_EMAIL_AUTH_USERNAME=<user>@<domain>
@@ -161,40 +164,40 @@ systems.
**Settings**:
| Required | CLI | Env | Type | Description |
-| :------: | ---------------------------------- | -------------------------------------- | ----- | --------------------------------------- |
+|:--------:|------------------------------------|----------------------------------------|-------|-----------------------------------------|
| ✔️ | `--notifications-webhook-endpoint` | `CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT` | `url` | The endpoint to which to send webhooks. |
Here is an example payload for Coder's webhook notification:
```json
{
- "_version": "1.0",
- "msg_id": "88750cad-77d4-4663-8bc0-f46855f5019b",
- "payload": {
- "_version": "1.0",
- "notification_name": "Workspace Deleted",
- "user_id": "4ac34fcb-8155-44d5-8301-e3cd46e88b35",
- "user_email": "danny@coder.com",
- "user_name": "danny",
- "user_username": "danny",
- "actions": [
- {
- "label": "View workspaces",
- "url": "https://et23ntkhpueak.pit-1.try.coder.app/workspaces"
- },
- {
- "label": "View templates",
- "url": "https://et23ntkhpueak.pit-1.try.coder.app/templates"
- }
- ],
- "labels": {
- "initiator": "danny",
- "name": "my-workspace",
- "reason": "initiated by user"
- }
- },
- "title": "Workspace \"my-workspace\" deleted",
- "body": "Hi danny\n\nYour workspace my-workspace was deleted.\nThe specified reason was \"initiated by user (danny)\"."
+ "_version": "1.0",
+ "msg_id": "88750cad-77d4-4663-8bc0-f46855f5019b",
+ "payload": {
+ "_version": "1.0",
+ "notification_name": "Workspace Deleted",
+ "user_id": "4ac34fcb-8155-44d5-8301-e3cd46e88b35",
+ "user_email": "danny@coder.com",
+ "user_name": "danny",
+ "user_username": "danny",
+ "actions": [
+ {
+ "label": "View workspaces",
+ "url": "https://et23ntkhpueak.pit-1.try.coder.app/workspaces"
+ },
+ {
+ "label": "View templates",
+ "url": "https://et23ntkhpueak.pit-1.try.coder.app/templates"
+ }
+ ],
+ "labels": {
+ "initiator": "danny",
+ "name": "my-workspace",
+ "reason": "initiated by user"
+ }
+ },
+ "title": "Workspace \"my-workspace\" deleted",
+ "body": "Hi danny\n\nYour workspace my-workspace was deleted.\nThe specified reason was \"initiated by user (danny)\"."
}
```
diff --git a/docs/admin/monitoring/notifications/slack.md b/docs/admin/monitoring/notifications/slack.md
index 8b788dc658fff..58bf9338ea2ae 100644
--- a/docs/admin/monitoring/notifications/slack.md
+++ b/docs/admin/monitoring/notifications/slack.md
@@ -34,9 +34,9 @@ To integrate Slack with Coder, follow these steps to create a Slack application:
3. Under "OAuth & Permissions", add the following OAuth scopes:
-- `chat:write`: To send messages as the app.
-- `users:read`: To find the user details.
-- `users:read.email`: To find user emails.
+ - `chat:write`: To send messages as the app.
+ - `users:read`: To find the user details.
+ - `users:read.email`: To find user emails.
4. Install the app to your workspace and note down the **Bot User OAuth Token**
from the "OAuth & Permissions" section.
@@ -52,128 +52,128 @@ To build the server to receive webhooks and interact with Slack:
1. Initialize your project by running:
-```bash
-npm init -y
-```
+ ```bash
+ npm init -y
+ ```
2. Install the Bolt library:
-```bash
-npm install @slack/bolt
-```
+ ```bash
+ npm install @slack/bolt
+ ```
3. Create and edit the `app.js` file. Below is an example of the basic
structure:
-```js
-const { App, LogLevel, ExpressReceiver } = require("@slack/bolt");
-const bodyParser = require("body-parser");
-
-const port = process.env.PORT || 6000;
-
-// Create a Bolt Receiver
-const receiver = new ExpressReceiver({
- signingSecret: process.env.SLACK_SIGNING_SECRET,
-});
-receiver.router.use(bodyParser.json());
-
-// Create the Bolt App, using the receiver
-const app = new App({
- token: process.env.SLACK_BOT_TOKEN,
- logLevel: LogLevel.DEBUG,
- receiver,
-});
-
-receiver.router.post("/v1/webhook", async (req, res) => {
- try {
- if (!req.body) {
- return res.status(400).send("Error: request body is missing");
- }
-
- const { title, body } = req.body;
- if (!title || !body) {
- return res.status(400).send('Error: missing fields: "title", or "body"');
- }
-
- const payload = req.body.payload;
- if (!payload) {
- return res.status(400).send('Error: missing "payload" field');
- }
-
- const { user_email, actions } = payload;
- if (!user_email || !actions) {
- return res
- .status(400)
- .send('Error: missing fields: "user_email", "actions"');
- }
-
- // Get the user ID using Slack API
- const userByEmail = await app.client.users.lookupByEmail({
- email: user_email,
- });
-
- const slackMessage = {
- channel: userByEmail.user.id,
- text: body,
- blocks: [
- {
- type: "header",
- text: { type: "plain_text", text: title },
- },
- {
- type: "section",
- text: { type: "mrkdwn", text: body },
- },
- ],
- };
-
- // Add action buttons if they exist
- if (actions && actions.length > 0) {
- slackMessage.blocks.push({
- type: "actions",
- elements: actions.map((action) => ({
- type: "button",
- text: { type: "plain_text", text: action.label },
- url: action.url,
- })),
- });
- }
-
- // Post message to the user on Slack
- await app.client.chat.postMessage(slackMessage);
-
- res.status(204).send();
- } catch (error) {
- console.error("Error sending message:", error);
- res.status(500).send();
- }
-});
-
-// Acknowledge clicks on link_button, otherwise Slack UI
-// complains about missing events.
-app.action("button_click", async ({ body, ack, say }) => {
- await ack(); // no specific action needed
-});
-
-// Start the Bolt app
-(async () => {
- await app.start(port);
- console.log("⚡️ Coder Slack bot is running!");
-})();
-```
-
-3. Set environment variables to identify the Slack app:
-
-```bash
-export SLACK_BOT_TOKEN=xoxb-...
-export SLACK_SIGNING_SECRET=0da4b...
-```
-
-4. Start the web application by running:
-
-```bash
-node app.js
-```
+ ```js
+ const { App, LogLevel, ExpressReceiver } = require("@slack/bolt");
+ const bodyParser = require("body-parser");
+
+ const port = process.env.PORT || 6000;
+
+ // Create a Bolt Receiver
+ const receiver = new ExpressReceiver({
+ signingSecret: process.env.SLACK_SIGNING_SECRET,
+ });
+ receiver.router.use(bodyParser.json());
+
+ // Create the Bolt App, using the receiver
+ const app = new App({
+ token: process.env.SLACK_BOT_TOKEN,
+ logLevel: LogLevel.DEBUG,
+ receiver,
+ });
+
+ receiver.router.post("/v1/webhook", async (req, res) => {
+ try {
+ if (!req.body) {
+ return res.status(400).send("Error: request body is missing");
+ }
+
+ const { title, body } = req.body;
+ if (!title || !body) {
+ return res.status(400).send('Error: missing fields: "title", or "body"');
+ }
+
+ const payload = req.body.payload;
+ if (!payload) {
+ return res.status(400).send('Error: missing "payload" field');
+ }
+
+ const { user_email, actions } = payload;
+ if (!user_email || !actions) {
+ return res
+ .status(400)
+ .send('Error: missing fields: "user_email", "actions"');
+ }
+
+ // Get the user ID using Slack API
+ const userByEmail = await app.client.users.lookupByEmail({
+ email: user_email,
+ });
+
+ const slackMessage = {
+ channel: userByEmail.user.id,
+ text: body,
+ blocks: [
+ {
+ type: "header",
+ text: { type: "plain_text", text: title },
+ },
+ {
+ type: "section",
+ text: { type: "mrkdwn", text: body },
+ },
+ ],
+ };
+
+ // Add action buttons if they exist
+ if (actions && actions.length > 0) {
+ slackMessage.blocks.push({
+ type: "actions",
+ elements: actions.map((action) => ({
+ type: "button",
+ text: { type: "plain_text", text: action.label },
+ url: action.url,
+ })),
+ });
+ }
+
+ // Post message to the user on Slack
+ await app.client.chat.postMessage(slackMessage);
+
+ res.status(204).send();
+ } catch (error) {
+ console.error("Error sending message:", error);
+ res.status(500).send();
+ }
+ });
+
+ // Acknowledge clicks on link_button, otherwise Slack UI
+ // complains about missing events.
+ app.action("button_click", async ({ body, ack, say }) => {
+ await ack(); // no specific action needed
+ });
+
+ // Start the Bolt app
+ (async () => {
+ await app.start(port);
+ console.log("⚡️ Coder Slack bot is running!");
+ })();
+ ```
+
+4. Set environment variables to identify the Slack app:
+
+ ```bash
+ export SLACK_BOT_TOKEN=xoxb-...
+ export SLACK_SIGNING_SECRET=0da4b...
+ ```
+
+5. Start the web application by running:
+
+ ```bash
+ node app.js
+ ```
## Enable Interactivity in Slack
diff --git a/docs/admin/monitoring/notifications/teams.md b/docs/admin/monitoring/notifications/teams.md
index bf913ac003ea2..5846cfc83bc48 100644
--- a/docs/admin/monitoring/notifications/teams.md
+++ b/docs/admin/monitoring/notifications/teams.md
@@ -21,115 +21,115 @@ following:
The process of setting up a Teams workflow consists of three key steps:
-1. Configure the Webhook Trigger.
-
- Begin by configuring the trigger: **"When a Teams webhook request is
- received"**.
-
- Ensure the trigger access level is set to **"Anyone"**.
-
-2. Setup the JSON Parsing Action.
-
- Next, add the **"Parse JSON"** action, linking the content to the **"Body"**
- of the received webhook request. Use the following schema to parse the
- notification payload:
-
- ```json
- {
- "type": "object",
- "properties": {
- "_version": {
- "type": "string"
- },
- "payload": {
- "type": "object",
- "properties": {
- "_version": {
- "type": "string"
- },
- "user_email": {
- "type": "string"
- },
- "actions": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "label": {
- "type": "string"
- },
- "url": {
- "type": "string"
- }
- },
- "required": ["label", "url"]
- }
- }
- }
- },
- "title": {
- "type": "string"
- },
- "body": {
- "type": "string"
- }
- }
- }
- ```
-
- This action parses the notification's title, body, and the recipient's email
- address.
-
-3. Configure the Adaptive Card Action.
-
- Finally, set up the **"Post Adaptive Card in a chat or channel"** action
- with the following recommended settings:
-
- **Post as**: Flow Bot
-
- **Post in**: Chat with Flow Bot
-
- **Recipient**: `user_email`
-
- Use the following _Adaptive Card_ template:
-
- ```json
- {
- "$schema": "https://adaptivecards.io/schemas/adaptive-card.json",
- "type": "AdaptiveCard",
- "version": "1.0",
- "body": [
- {
- "type": "Image",
- "url": "https://coder.com/coder-logo-horizontal.png",
- "height": "40px",
- "altText": "Coder",
- "horizontalAlignment": "center"
- },
- {
- "type": "TextBlock",
- "text": "**@{replace(body('Parse_JSON')?['title'], '"', '\"')}**"
- },
- {
- "type": "TextBlock",
- "text": "@{replace(body('Parse_JSON')?['body'], '"', '\"')}",
- "wrap": true
- },
- {
- "type": "ActionSet",
- "actions": [@{replace(replace(join(body('Parse_JSON')?['payload']?['actions'], ','), '{', '{"type": "Action.OpenUrl",'), '"label"', '"title"')}]
- }
- ]
- }
- ```
-
- _Notice_: The Coder `actions` format differs from the `ActionSet` schema, so
- its properties need to be modified: include `Action.OpenUrl` type, rename
- `label` to `title`. Unfortunately, there is no straightforward solution for
- `for-each` pattern.
-
- Feel free to customize the payload to modify the logo, notification title,
- or body content to suit your needs.
+1. Configure the Webhook Trigger.
+
+ Begin by configuring the trigger: **"When a Teams webhook request is
+ received"**.
+
+ Ensure the trigger access level is set to **"Anyone"**.
+
+1. Setup the JSON Parsing Action.
+
+ Add the **"Parse JSON"** action, linking the content to the **"Body"** of the
+ received webhook request. Use the following schema to parse the notification
+ payload:
+
+ ```json
+ {
+ "type": "object",
+ "properties": {
+ "_version": {
+ "type": "string"
+ },
+ "payload": {
+ "type": "object",
+ "properties": {
+ "_version": {
+ "type": "string"
+ },
+ "user_email": {
+ "type": "string"
+ },
+ "actions": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "label": {
+ "type": "string"
+ },
+ "url": {
+ "type": "string"
+ }
+ },
+ "required": ["label", "url"]
+ }
+ }
+ }
+ },
+ "title": {
+ "type": "string"
+ },
+ "body": {
+ "type": "string"
+ }
+ }
+ }
+ ```
+
+ This action parses the notification's title, body, and the recipient's email
+ address.
+
+1. Configure the Adaptive Card Action.
+
+ Finally, set up the **"Post Adaptive Card in a chat or channel"** action with
+ the following recommended settings:
+
+ **Post as**: Flow Bot
+
+ **Post in**: Chat with Flow Bot
+
+ **Recipient**: `user_email`
+
+ Use the following _Adaptive Card_ template:
+
+ ```json
+ {
+ "$schema": "https://adaptivecards.io/schemas/adaptive-card.json",
+ "type": "AdaptiveCard",
+ "version": "1.0",
+ "body": [
+ {
+ "type": "Image",
+ "url": "https://coder.com/coder-logo-horizontal.png",
+ "height": "40px",
+ "altText": "Coder",
+ "horizontalAlignment": "center"
+ },
+ {
+ "type": "TextBlock",
+ "text": "**@{replace(body('Parse_JSON')?['title'], '"', '\"')}**"
+ },
+ {
+ "type": "TextBlock",
+ "text": "@{replace(body('Parse_JSON')?['body'], '"', '\"')}",
+ "wrap": true
+ },
+ {
+ "type": "ActionSet",
+ "actions": [@{replace(replace(join(body('Parse_JSON')?['payload']?['actions'], ','), '{', '{"type": "Action.OpenUrl",'), '"label"', '"title"')}]
+ }
+ ]
+ }
+ ```
+
+ _Notice_: The Coder `actions` format differs from the `ActionSet` schema, so
+ its properties need to be modified: include `Action.OpenUrl` type, rename
+ `label` to `title`. Unfortunately, there is no straightforward solution for
+ `for-each` pattern.
+
+ Feel free to customize the payload to modify the logo, notification title, or
+ body content to suit your needs.
## Enable Webhook Integration
diff --git a/docs/admin/networking/high-availability.md b/docs/admin/networking/high-availability.md
index 051175178dd8f..7dee70a2930fc 100644
--- a/docs/admin/networking/high-availability.md
+++ b/docs/admin/networking/high-availability.md
@@ -42,7 +42,7 @@ rendezvous for the Coder nodes.
Here's an example 3-node network configuration setup:
| Name | `CODER_HTTP_ADDRESS` | `CODER_DERP_SERVER_RELAY_URL` | `CODER_ACCESS_URL` |
-| --------- | -------------------- | ----------------------------- | ------------------------ |
+|-----------|----------------------|-------------------------------|--------------------------|
| `coder-1` | `*:80` | `http://10.0.0.1:80` | `https://coder.big.corp` |
| `coder-2` | `*:80` | `http://10.0.0.2:80` | `https://coder.big.corp` |
| `coder-3` | `*:80` | `http://10.0.0.3:80` | `https://coder.big.corp` |
diff --git a/docs/admin/networking/index.md b/docs/admin/networking/index.md
index e07ee39002d44..34e1ef875a7b4 100644
--- a/docs/admin/networking/index.md
+++ b/docs/admin/networking/index.md
@@ -168,7 +168,7 @@ After you have custom DERP servers, you can launch Coder with them like so:
```
```bash
-$ coder server --derp-config-path derpmap.json
+coder server --derp-config-path derpmap.json
```
### Dashboard connections
diff --git a/docs/admin/networking/port-forwarding.md b/docs/admin/networking/port-forwarding.md
index e7a07ab4a4189..34a7133b75855 100644
--- a/docs/admin/networking/port-forwarding.md
+++ b/docs/admin/networking/port-forwarding.md
@@ -156,7 +156,7 @@ protocol configuration for each shared port individually.
You can access any port on the workspace and can configure the port protocol
manually by appending a `s` to the port in the URL.
-```
+```text
# Uses HTTP
https://33295--agent--workspace--user--apps.example.com/
# Uses HTTPS
@@ -180,8 +180,8 @@ requests cannot be authenticated and you will see an error resembling the
following:
> Access to fetch at
-> 'https://coder.example.com/api/v2/applications/auth-redirect' from origin
-> 'https://8000--dev--user--apps.coder.example.com' has been blocked by CORS
+> '<https://coder.example.com/api/v2/applications/auth-redirect>' from origin
+> '<https://8000--dev--user--apps.coder.example.com>' has been blocked by CORS
> policy: No 'Access-Control-Allow-Origin' header is present on the requested
> resource. If an opaque response serves your needs, set the request's mode to
> 'no-cors' to fetch the resource with CORS disabled.
@@ -190,7 +190,7 @@ following:
Below is a list of the cross-origin headers Coder sets with example values:
-```
+```text
access-control-allow-credentials: true
access-control-allow-methods: PUT
access-control-allow-headers: X-Custom-Header
diff --git a/docs/admin/networking/workspace-proxies.md b/docs/admin/networking/workspace-proxies.md
index 03da5e142f7ce..288c9eab66f97 100644
--- a/docs/admin/networking/workspace-proxies.md
+++ b/docs/admin/networking/workspace-proxies.md
@@ -14,11 +14,11 @@ connecting with their workspace over SSH, a workspace app, port forwarding, etc.
Dashboard connections and API calls (e.g. the workspaces list) are not served
over workspace proxies.
-# Deploy a workspace proxy
+## Deploy a workspace proxy
-Each workspace proxy should be a unique instance. At no point should 2 workspace
-proxy instances share the same authentication token. They only require port 443
-to be open and are expected to have network connectivity to the coderd
+Each workspace proxy should be a unique instance. At no point should two
+workspace proxy instances share the same authentication token. They only require
+port 443 to be open and are expected to have network connectivity to the coderd
dashboard. Workspace proxies **do not** make any database connections.
Workspace proxies can be used in the browser by navigating to the user
diff --git a/docs/admin/provisioners.md b/docs/admin/provisioners.md
index f0c8cc5186a21..1a27cf1d8f25a 100644
--- a/docs/admin/provisioners.md
+++ b/docs/admin/provisioners.md
@@ -201,33 +201,33 @@ different organizations.
This is illustrated in the below table:
| Provisioner Tags | Job Tags | Same Org | Can Run Job? |
-| ----------------------------------------------------------------- | ---------------------------------------------------------------- | -------- | ------------ |
-| scope=organization owner= | scope=organization owner= | ✅ | ✅ |
-| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ✅ | ✅ |
-| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem | ✅ | ✅ |
-| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ✅ |
-| scope=user owner=aaa | scope=user owner=aaa | ✅ | ✅ |
-| scope=user owner=aaa environment=on-prem | scope=user owner=aaa | ✅ | ✅ |
-| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem | ✅ | ✅ |
-| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem | ✅ | ✅ |
-| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ✅ |
-| scope=organization owner= | scope=organization owner= environment=on-prem | ✅ | ❌ |
-| scope=organization owner= environment=on-prem | scope=organization owner= | ✅ | ❌ |
-| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ |
-| scope=organization owner= environment=on-prem datacenter=new_york | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ |
-| scope=user owner=aaa | scope=organization owner= | ✅ | ❌ |
-| scope=user owner=aaa | scope=user owner=bbb | ✅ | ❌ |
-| scope=organization owner= | scope=user owner=aaa | ✅ | ❌ |
-| scope=organization owner= | scope=user owner=aaa environment=on-prem | ✅ | ❌ |
-| scope=user owner=aaa | scope=user owner=aaa environment=on-prem | ✅ | ❌ |
-| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ❌ |
-| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=new_york | ✅ | ❌ |
-| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ❌ | ❌ |
+|-------------------------------------------------------------------|------------------------------------------------------------------|----------|--------------|
+| scope=organization owner= | scope=organization owner= | ✅ | ✅ |
+| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ✅ | ✅ |
+| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem | ✅ | ✅ |
+| scope=organization owner= environment=on-prem datacenter=chicago | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ✅ |
+| scope=user owner=aaa | scope=user owner=aaa | ✅ | ✅ |
+| scope=user owner=aaa environment=on-prem | scope=user owner=aaa | ✅ | ✅ |
+| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem | ✅ | ✅ |
+| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem | ✅ | ✅ |
+| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ✅ |
+| scope=organization owner= | scope=organization owner= environment=on-prem | ✅ | ❌ |
+| scope=organization owner= environment=on-prem | scope=organization owner= | ✅ | ❌ |
+| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ |
+| scope=organization owner= environment=on-prem datacenter=new_york | scope=organization owner= environment=on-prem datacenter=chicago | ✅ | ❌ |
+| scope=user owner=aaa | scope=organization owner= | ✅ | ❌ |
+| scope=user owner=aaa | scope=user owner=bbb | ✅ | ❌ |
+| scope=organization owner= | scope=user owner=aaa | ✅ | ❌ |
+| scope=organization owner= | scope=user owner=aaa environment=on-prem | ✅ | ❌ |
+| scope=user owner=aaa | scope=user owner=aaa environment=on-prem | ✅ | ❌ |
+| scope=user owner=aaa environment=on-prem | scope=user owner=aaa environment=on-prem datacenter=chicago | ✅ | ❌ |
+| scope=user owner=aaa environment=on-prem datacenter=chicago | scope=user owner=aaa environment=on-prem datacenter=new_york | ✅ | ❌ |
+| scope=organization owner= environment=on-prem | scope=organization owner= environment=on-prem | ❌ | ❌ |
> **Note to maintainers:** to generate this table, run the following command and
> copy the output:
>
-> ```
+> ```go
> go test -v -count=1 ./coderd/provisionerdserver/ -test.run='^TestAcquirer_MatchTags/GenTable$'
> ```
diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md
index 092cb5fba6456..430d03adb0667 100644
--- a/docs/admin/security/audit-logs.md
+++ b/docs/admin/security/audit-logs.md
@@ -8,30 +8,30 @@ We track the following resources:
-| Resource | |
-| -------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| APIKey login, logout, register, create, delete |
|
@@ -85,34 +85,34 @@ log entry:
```json
{
- "ts": "2023-06-13T03:45:37.294730279Z",
- "level": "INFO",
- "msg": "audit_log",
- "caller": "/home/runner/work/coder/coder/enterprise/audit/backends/slog.go:36",
- "func": "github.com/coder/coder/enterprise/audit/backends.slogBackend.Export",
- "logger_names": ["coderd"],
- "fields": {
- "ID": "033a9ffa-b54d-4c10-8ec3-2aaf9e6d741a",
- "Time": "2023-06-13T03:45:37.288506Z",
- "UserID": "6c405053-27e3-484a-9ad7-bcb64e7bfde6",
- "OrganizationID": "00000000-0000-0000-0000-000000000000",
- "Ip": "{IPNet:{IP:\u003cnil\u003e Mask:\u003cnil\u003e} Valid:false}",
- "UserAgent": "{String: Valid:false}",
- "ResourceType": "workspace_build",
- "ResourceID": "ca5647e0-ef50-4202-a246-717e04447380",
- "ResourceTarget": "",
- "Action": "start",
- "Diff": {},
- "StatusCode": 200,
- "AdditionalFields": {
- "workspace_name": "linux-container",
- "build_number": "9",
- "build_reason": "initiator",
- "workspace_owner": ""
- },
- "RequestID": "bb791ac3-f6ee-4da8-8ec2-f54e87013e93",
- "ResourceIcon": ""
- }
+ "ts": "2023-06-13T03:45:37.294730279Z",
+ "level": "INFO",
+ "msg": "audit_log",
+ "caller": "/home/runner/work/coder/coder/enterprise/audit/backends/slog.go:36",
+ "func": "github.com/coder/coder/enterprise/audit/backends.slogBackend.Export",
+ "logger_names": ["coderd"],
+ "fields": {
+ "ID": "033a9ffa-b54d-4c10-8ec3-2aaf9e6d741a",
+ "Time": "2023-06-13T03:45:37.288506Z",
+ "UserID": "6c405053-27e3-484a-9ad7-bcb64e7bfde6",
+ "OrganizationID": "00000000-0000-0000-0000-000000000000",
+ "Ip": "{IPNet:{IP:\u003cnil\u003e Mask:\u003cnil\u003e} Valid:false}",
+ "UserAgent": "{String: Valid:false}",
+ "ResourceType": "workspace_build",
+ "ResourceID": "ca5647e0-ef50-4202-a246-717e04447380",
+ "ResourceTarget": "",
+ "Action": "start",
+ "Diff": {},
+ "StatusCode": 200,
+ "AdditionalFields": {
+ "workspace_name": "linux-container",
+ "build_number": "9",
+ "build_reason": "initiator",
+ "workspace_owner": ""
+ },
+ "RequestID": "bb791ac3-f6ee-4da8-8ec2-f54e87013e93",
+ "ResourceIcon": ""
+ }
}
```
diff --git a/docs/admin/security/index.md b/docs/admin/security/index.md
index ea560abcde503..cb83bf6b78271 100644
--- a/docs/admin/security/index.md
+++ b/docs/admin/security/index.md
@@ -23,5 +23,5 @@ vulnerability.
---
| Description | Severity | Fix | Vulnerable Versions |
-| --------------------------------------------------------------------------------------------------------------------------------------------- | -------- | -------------------------------------------------------------- | ------------------- |
+|-----------------------------------------------------------------------------------------------------------------------------------------------|----------|----------------------------------------------------------------|---------------------|
| [API tokens of deleted users not invalidated](https://github.com/coder/coder/blob/main/docs/admin/security/0001_user_apikeys_invalidation.md) | HIGH | [v0.23.0](https://github.com/coder/coder/releases/tag/v0.23.0) | v0.8.25 - v0.22.2 |
diff --git a/docs/admin/security/secrets.md b/docs/admin/security/secrets.md
index 00f69fdda7f43..4fcd188ed0583 100644
--- a/docs/admin/security/secrets.md
+++ b/docs/admin/security/secrets.md
@@ -9,7 +9,7 @@ This article explains how to use secrets in a workspace. To authenticate the
workspace provisioner, see the
provisioners documentation.
-## Wait a minute...
+## Before you begin
Your first attempt to use secrets with Coder should be your local method. You
can do everything you can locally and more with your Coder workspace, so
diff --git a/docs/admin/setup/index.md b/docs/admin/setup/index.md
index 9429db6cfebac..9af914125a75e 100644
--- a/docs/admin/setup/index.md
+++ b/docs/admin/setup/index.md
@@ -52,7 +52,7 @@ a wildcard subdomain that resolves to Coder (e.g. `*.coder.example.com`).
If you are providing TLS certificates directly to the Coder server, either
1. Use a single certificate and key for both the root and wildcard domains.
-2. Configure multiple certificates and keys via
+1. Configure multiple certificates and keys via
[`coder.tls.secretNames`](https://github.com/coder/coder/blob/main/helm/coder/values.yaml)
in the Helm Chart, or
[`--tls-cert-file`](../../reference/cli/server.md#--tls-cert-file) and
@@ -78,29 +78,27 @@ working directory prior to step 1.
1. Create the TLS secret in your Kubernetes cluster
-```shell
-kubectl create secret tls coder-tls -n --key="tls.key" --cert="tls.crt"
-```
+ ```shell
+   kubectl create secret tls coder-tls -n <coder-namespace> --key="tls.key" --cert="tls.crt"
+ ```
-> You can use a single certificate for the both the access URL and wildcard
-> access URL. The certificate CN must match the wildcard domain, such as
-> `*.example.coder.com`.
+   You can use a single certificate for both the access URL and wildcard access URL. The certificate CN must match the wildcard domain, such as `*.example.coder.com`.
1. Reference the TLS secret in your Coder Helm chart values
-```yaml
-coder:
- tls:
- secretName:
- - coder-tls
-
- # Alternatively, if you use an Ingress controller to terminate TLS,
- # set the following values:
- ingress:
- enable: true
- secretName: coder-tls
- wildcardSecretName: coder-tls
-```
+ ```yaml
+ coder:
+ tls:
+ secretName:
+ - coder-tls
+
+ # Alternatively, if you use an Ingress controller to terminate TLS,
+ # set the following values:
+ ingress:
+ enable: true
+ secretName: coder-tls
+ wildcardSecretName: coder-tls
+ ```
## PostgreSQL Database
@@ -116,7 +114,7 @@ the PostgreSQL interactive terminal), output the connection URL with the
following command:
```console
-coder server postgres-builtin-url
+$ coder server postgres-builtin-url
psql "postgres://coder@localhost:49627/coder?sslmode=disable&password=feU...yI1"
```
@@ -126,13 +124,13 @@ To migrate from the built-in database to an external database, follow these
steps:
1. Stop your Coder deployment.
-2. Run `coder server postgres-builtin-serve` in a background terminal.
-3. Run `coder server postgres-builtin-url` and copy its output command.
-4. Run `pg_dump > coder.sql` to dump the internal
+1. Run `coder server postgres-builtin-serve` in a background terminal.
+1. Run `coder server postgres-builtin-url` and copy its output command.
+1. Run `pg_dump <connection-url> > coder.sql` to dump the internal
database to a file.
-5. Restore that content to an external database with
+1. Restore that content to an external database with
`psql < coder.sql`.
-6. Start your Coder deployment with
+1. Start your Coder deployment with
   `CODER_PG_CONNECTION_URL=<external-url>`.
## Configuring Coder behind a proxy
@@ -144,7 +142,7 @@ To configure Coder behind a corporate proxy, set the environment variables
## External Authentication
Coder supports external authentication via OAuth2.0. This allows enabling
-integrations with git providers, such as GitHub, GitLab, and Bitbucket etc.
+integrations with Git providers, such as GitHub, GitLab, and Bitbucket.
External authentication can also be used to integrate with external services
like JFrog Artifactory and others.
@@ -154,5 +152,5 @@ more information.
## Up Next
-- [Learn how to setup and manage templates](../templates/index.md)
+- [Set up and manage templates](../templates/index.md)
- [Setup external provisioners](../provisioners.md)
diff --git a/docs/admin/templates/extending-templates/docker-in-workspaces.md b/docs/admin/templates/extending-templates/docker-in-workspaces.md
index 45439830e3ef6..734e7545a9090 100644
--- a/docs/admin/templates/extending-templates/docker-in-workspaces.md
+++ b/docs/admin/templates/extending-templates/docker-in-workspaces.md
@@ -3,7 +3,7 @@
There are a few ways to run Docker within container-based Coder workspaces.
| Method | Description | Limitations |
-| ---------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [Sysbox container runtime](#sysbox-container-runtime) | Install the Sysbox runtime on your Kubernetes nodes or Docker host(s) for secure docker-in-docker and systemd-in-docker. Works with GKE, EKS, AKS, Docker. | Requires [compatible nodes](https://github.com/nestybox/sysbox#host-requirements). [Limitations](https://github.com/nestybox/sysbox/blob/master/docs/user-guide/limitations.md) |
| [Envbox](#envbox) | A container image with all the packages necessary to run an inner Sysbox container. Removes the need to setup sysbox-runc on your nodes. Works with GKE, EKS, AKS. | Requires running the outer container as privileged (the inner container that acts as the workspace is locked down). Requires compatible [nodes](https://github.com/nestybox/sysbox/blob/master/docs/distro-compat.md#sysbox-distro-compatibility). |
| [Rootless Podman](#rootless-podman) | Run Podman inside Coder workspaces. Does not require a custom runtime or privileged containers. Works with GKE, EKS, AKS, RKE, OpenShift | Requires smarter-device-manager for FUSE mounts. [See all](https://github.com/containers/podman/blob/main/rootless.md#shortcomings-of-rootless-podman) |
diff --git a/docs/admin/templates/extending-templates/external-auth.md b/docs/admin/templates/extending-templates/external-auth.md
index de021d2783b64..ab27780b8b72d 100644
--- a/docs/admin/templates/extending-templates/external-auth.md
+++ b/docs/admin/templates/extending-templates/external-auth.md
@@ -52,7 +52,7 @@ coder external-auth access-token
Note: Some IDEs override the `GIT_ASKPASS` environment variable and need to be
configured.
-**VSCode**
+#### VSCode
Use the
[Coder](https://marketplace.visualstudio.com/items?itemName=coder.coder-remote)
diff --git a/docs/admin/templates/extending-templates/parameters.md b/docs/admin/templates/extending-templates/parameters.md
index 5ea82c0934b65..2c4801c08e82b 100644
--- a/docs/admin/templates/extending-templates/parameters.md
+++ b/docs/admin/templates/extending-templates/parameters.md
@@ -90,7 +90,7 @@ data "coder_parameter" "security_groups" {
> For the above example, to override the default values of the `security_groups`
> parameter, you will need to pass the following argument to `coder create`:
>
-> ```
+> ```shell
> --parameter "\"security_groups=[\"\"DevOps Security Group\"\",\"\"Backend Security Group\"\"]\""
> ```
>
diff --git a/docs/admin/templates/extending-templates/process-logging.md b/docs/admin/templates/extending-templates/process-logging.md
index 989bdd8572ae5..8822d988402fc 100644
--- a/docs/admin/templates/extending-templates/process-logging.md
+++ b/docs/admin/templates/extending-templates/process-logging.md
@@ -254,28 +254,28 @@ The raw logs will look something like this:
```json
{
- "ts": "2022-02-28T20:29:38.038452202Z",
- "level": "INFO",
- "msg": "exec",
- "fields": {
- "labels": {
- "user_email": "jessie@coder.com",
- "user_id": "5e876e9a-121663f01ebd1522060d5270",
- "username": "jessie",
- "workspace_id": "621d2e52-a6987ef6c56210058ee2593c",
- "workspace_name": "main"
- },
- "cmdline": "uname -a",
- "event": {
- "filename": "/usr/bin/uname",
- "argv": ["uname", "-a"],
- "truncated": false,
- "pid": 920684,
- "uid": 101000,
- "gid": 101000,
- "comm": "bash"
- }
- }
+ "ts": "2022-02-28T20:29:38.038452202Z",
+ "level": "INFO",
+ "msg": "exec",
+ "fields": {
+ "labels": {
+ "user_email": "jessie@coder.com",
+ "user_id": "5e876e9a-121663f01ebd1522060d5270",
+ "username": "jessie",
+ "workspace_id": "621d2e52-a6987ef6c56210058ee2593c",
+ "workspace_name": "main"
+ },
+ "cmdline": "uname -a",
+ "event": {
+ "filename": "/usr/bin/uname",
+ "argv": ["uname", "-a"],
+ "truncated": false,
+ "pid": 920684,
+ "uid": 101000,
+ "gid": 101000,
+ "comm": "bash"
+ }
+ }
}
```
diff --git a/docs/admin/templates/extending-templates/variables.md b/docs/admin/templates/extending-templates/variables.md
index acb064e15c3d9..3c1d02f0baf63 100644
--- a/docs/admin/templates/extending-templates/variables.md
+++ b/docs/admin/templates/extending-templates/variables.md
@@ -53,15 +53,15 @@ variables, you can employ a straightforward solution:
1. Create a `terraform.tfvars` file in the template directory:
-```tf
-coder_image = newimage:tag
-```
+ ```tf
+ coder_image = newimage:tag
+ ```
-2. Push the new template revision using Coder CLI:
+1. Push the new template revision using Coder CLI:
-```
-coder templates push my-template -y # no need to use --var
-```
+ ```shell
+ coder templates push my-template -y # no need to use --var
+ ```
This file serves as a mechanism to override the template settings for variables.
It can be stored in the repository for easy access and reference. Coder CLI
diff --git a/docs/admin/templates/extending-templates/workspace-tags.md b/docs/admin/templates/extending-templates/workspace-tags.md
index 83ea983ce72ba..e49957d9ba515 100644
--- a/docs/admin/templates/extending-templates/workspace-tags.md
+++ b/docs/admin/templates/extending-templates/workspace-tags.md
@@ -26,7 +26,7 @@ data "coder_workspace_tags" "custom_workspace_tags" {
}
```
-**Legend**
+### Legend
- `zone` - static tag value set to `developers`
- `runtime` - supported by the string-type `coder_parameter` to select
@@ -55,7 +55,7 @@ raw values from the database and evaluates them using provided template
variables and parameters. This is illustrated in the table below:
| Value Type | Template Import | Workspace Creation |
-| ---------- | -------------------------------------------------- | ----------------------- |
+|------------|----------------------------------------------------|-------------------------|
| Static | `{"region": "us"}` | `{"region": "us"}` |
| Variable | `{"az": var.az}` | `{"region": "us-east"}` |
| Parameter | `{"cluster": data.coder_parameter.cluster.value }` | `{"cluster": "dev"}` |
@@ -98,7 +98,7 @@ as immutable and set only once, during workspace creation.
You may only specify the following as inputs for `coder_workspace_tags`:
| | Example |
-| :----------------- | :-------------------------------------------- |
+|:-------------------|:----------------------------------------------|
| Static values | `"developers"` |
| Template variables | `var.az` |
| Coder parameters | `data.coder_parameter.runtime_selector.value` |
@@ -115,7 +115,7 @@ raw queries on-the-fly without processing the entire Terraform template. This
evaluation is simpler but also limited in terms of available functions,
variables, and references to other resources.
-**Supported syntax**
+#### Supported syntax
- Static string: `foobar_tag = "foobaz"`
- Formatted string: `foobar_tag = "foobaz ${data.coder_parameter.foobaz.value}"`
@@ -125,7 +125,7 @@ variables, and references to other resources.
- Condition:
`cache = data.coder_parameter.feature_cache_enabled.value == "true" ? "with-cache" : "no-cache"`
-**Not supported**
+#### Not supported
- Function calls: `try(var.foo, "default")`
- Resources: `compute_instance.dev.name`
diff --git a/docs/admin/templates/managing-templates/devcontainers/add-devcontainer.md b/docs/admin/templates/managing-templates/devcontainers/add-devcontainer.md
index 3d4a049b138e2..5d2ac0a07f9e2 100644
--- a/docs/admin/templates/managing-templates/devcontainers/add-devcontainer.md
+++ b/docs/admin/templates/managing-templates/devcontainers/add-devcontainer.md
@@ -121,7 +121,7 @@ their development environments:
## Example templates
| Template | Description |
-| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|---------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [Docker dev containers](https://github.com/coder/coder/tree/main/examples/templates/docker-devcontainer) | Docker provisions a development container. |
| [Kubernetes dev containers](https://github.com/coder/coder/tree/main/examples/templates/kubernetes-devcontainer) | Provisions a development container on the Kubernetes cluster. |
| [Google Compute Engine dev container](https://github.com/coder/coder/tree/main/examples/templates/gcp-devcontainer) | Runs a development container inside a single GCP instance. It also mounts the Docker socket from the VM inside the container to enable Docker inside the workspace. |
@@ -144,7 +144,3 @@ Lifecycle scripts are managed by project developers.
## Next steps
- [Dev container security and caching](./devcontainer-security-caching.md)
-
-```
-
-```
diff --git a/docs/admin/templates/managing-templates/devcontainers/devcontainer-security-caching.md b/docs/admin/templates/managing-templates/devcontainers/devcontainer-security-caching.md
index bb56ff6a31e32..a0ae51624fc6d 100644
--- a/docs/admin/templates/managing-templates/devcontainers/devcontainer-security-caching.md
+++ b/docs/admin/templates/managing-templates/devcontainers/devcontainer-security-caching.md
@@ -3,7 +3,7 @@
Ensure Envbuilder can only pull pre-approved images and artifacts by configuring
it with your existing HTTP proxies, firewalls, and artifact managers.
-### Configure registry authentication
+## Configure registry authentication
You may need to authenticate to your container registry, such as Artifactory, or
Git provider such as GitLab, to use Envbuilder. See the
diff --git a/docs/admin/users/github-auth.md b/docs/admin/users/github-auth.md
index cc1f5365bcdc2..97e700e262ff8 100644
--- a/docs/admin/users/github-auth.md
+++ b/docs/admin/users/github-auth.md
@@ -1,6 +1,6 @@
-## GitHub
+# GitHub
-### Step 1: Configure the OAuth application in GitHub
+## Step 1: Configure the OAuth application in GitHub
First,
[register a GitHub OAuth app](https://developer.github.com/apps/building-oauth-apps/creating-an-oauth-app/).
@@ -22,7 +22,7 @@ values in the next step.
Coder will need permission to access user email addresses. Find the "Account
Permissions" settings for your app and select "read-only" for "Email addresses".
-### Step 2: Configure Coder with the OAuth credentials
+## Step 2: Configure Coder with the OAuth credentials
Navigate to your Coder host and run the following command to start up the Coder
server:
diff --git a/docs/admin/users/groups-roles.md b/docs/admin/users/groups-roles.md
index 91647db34c8f1..21dc22988b76b 100644
--- a/docs/admin/users/groups-roles.md
+++ b/docs/admin/users/groups-roles.md
@@ -17,16 +17,16 @@ which templates developers can use. For example:
Roles determine which actions users can take within the platform.
| | Auditor | User Admin | Template Admin | Owner |
-| --------------------------------------------------------------- | ------- | ---------- | -------------- | ----- |
-| Add and remove Users | | ✅ | | ✅ |
-| Manage groups (enterprise) (premium) | | ✅ | | ✅ |
-| Change User roles | | | | ✅ |
-| Manage **ALL** Templates | | | ✅ | ✅ |
-| View **ALL** Workspaces | | | ✅ | ✅ |
-| Update and delete **ALL** Workspaces | | | | ✅ |
-| Run [external provisioners](../provisioners.md) | | | ✅ | ✅ |
-| Execute and use **ALL** Workspaces | | | | ✅ |
-| View all user operation [Audit Logs](../security/audit-logs.md) | ✅ | | | ✅ |
+|-----------------------------------------------------------------|---------|------------|----------------|-------|
+| Add and remove Users | | ✅ | | ✅ |
+| Manage groups (enterprise) (premium) | | ✅ | | ✅ |
+| Change User roles | | | | ✅ |
+| Manage **ALL** Templates | | | ✅ | ✅ |
+| View **ALL** Workspaces | | | ✅ | ✅ |
+| Update and delete **ALL** Workspaces | | | | ✅ |
+| Run [external provisioners](../provisioners.md) | | | ✅ | ✅ |
+| Execute and use **ALL** Workspaces | | | | ✅ |
+| View all user operation [Audit Logs](../security/audit-logs.md) | ✅ | | | ✅ |
A user may have one or more roles. All users have an implicit Member role that
may use personal workspaces.
diff --git a/docs/admin/users/idp-sync.md b/docs/admin/users/idp-sync.md
index a28407c0ce788..8e9ea79a9a80b 100644
--- a/docs/admin/users/idp-sync.md
+++ b/docs/admin/users/idp-sync.md
@@ -1,3 +1,4 @@
+
# IDP Sync
@@ -7,6 +8,8 @@ IDP sync is an Enterprise and Premium feature.
+## Group Sync
+
If your OpenID Connect provider supports group claims, you can configure Coder
to synchronize groups in your auth provider to groups within Coder. To enable
group sync, ensure that the `groups` claim is being sent by your OpenID
@@ -141,10 +144,10 @@ will be able to configure this in the UI. For now, you must use CLI commands.
```json
{
- "field": "",
- "mapping": null,
- "regex_filter": null,
- "auto_create_missing_groups": false
+ "field": "",
+ "mapping": null,
+ "regex_filter": null,
+ "auto_create_missing_groups": false
}
```
@@ -153,10 +156,10 @@ Below is an example that uses the `groups` claim and maps all groups prefixed by
```json
{
- "field": "groups",
- "mapping": null,
- "regex_filter": "^coder-.*$",
- "auto_create_missing_groups": true
+ "field": "groups",
+ "mapping": null,
+ "regex_filter": "^coder-.*$",
+ "auto_create_missing_groups": true
}
```
@@ -174,16 +177,16 @@ group:
```json
{
- "field": "groups",
- "mapping": {
- "coder-admins": [
- "2ba2a4ff-ddfb-4493-b7cd-1aec2fa4c830",
- "93371154-150f-4b12-b5f0-261bb1326bb4"
- ],
- "coder-users": ["2f4bde93-0179-4815-ba50-b757fb3d43dd"]
- },
- "regex_filter": null,
- "auto_create_missing_groups": false
+ "field": "groups",
+ "mapping": {
+ "coder-admins": [
+ "2ba2a4ff-ddfb-4493-b7cd-1aec2fa4c830",
+ "93371154-150f-4b12-b5f0-261bb1326bb4"
+ ],
+ "coder-users": ["2f4bde93-0179-4815-ba50-b757fb3d43dd"]
+ },
+ "regex_filter": null,
+ "auto_create_missing_groups": false
}
```
@@ -209,7 +212,7 @@ Users who are not in a matching group will see the following error:
-## Role sync
+## Role Sync
@@ -307,8 +310,8 @@ will be able to configure this in the UI. For now, you must use CLI commands.
```json
{
- "field": "",
- "mapping": null
+ "field": "",
+ "mapping": null
}
```
@@ -318,11 +321,11 @@ role:
```json
{
- "field": "roles",
- "mapping": {
- "coder-admins": ["organization-admin"],
- "infra-admins": ["provisioner-admin"]
- }
+ "field": "roles",
+ "mapping": {
+ "coder-admins": ["organization-admin"],
+ "infra-admins": ["provisioner-admin"]
+ }
}
```
@@ -372,7 +375,7 @@ dashboard:
-### Dashboard
+## Dashboard
1. Confirm that your OIDC provider is sending claims. Log in with OIDC and visit
the following URL with an `Owner` account:
@@ -412,7 +415,7 @@ dashboard:
![IdP organization sync](../../images/admin/users/organizations/idp-org-sync.png)
-### CLI
+## CLI
Use the Coder CLI to show and adjust the settings.
@@ -455,7 +458,7 @@ settings, a user's memberships will update when they log out and log back in.
Analyzing the JSON payload:
| Field | Explanation |
- | :-------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+ |:----------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| field | If this field is the empty string `""`, then org-sync is disabled. Org memberships must be manually configured through the UI or API. |
| mapping | Mapping takes a claim from the IdP, and associates it with 1 or more organizations by UUID. No validation is done, so you can put UUID's of orgs that do not exist (a noop). The UI picker will allow selecting orgs from a drop down, and convert it to a UUID for you. |
| organization_assign_default | This setting exists for maintaining backwards compatibility with single org deployments, either through their upgrade, or in perpetuity. If this is set to 'true', all users will always be assigned to the default organization regardless of the mappings and their IdP claims. |
diff --git a/docs/admin/users/quotas.md b/docs/admin/users/quotas.md
index 4ac801148eb47..dd2c8a62bd51d 100644
--- a/docs/admin/users/quotas.md
+++ b/docs/admin/users/quotas.md
@@ -76,7 +76,7 @@ the sum of their allowances.
For example:
| Group Name | Quota Allowance |
-| ---------- | --------------- |
+|------------|-----------------|
| Frontend | 10 |
| Backend | 20 |
| Data | 30 |
@@ -84,7 +84,7 @@ For example:
| Username | Groups | Effective Budget |
-| -------- | ----------------- | ---------------- |
+|----------|-------------------|------------------|
| jill | Frontend, Backend | 30 |
| jack | Backend, Data | 50 |
| sam | Data | 30 |
diff --git a/docs/changelogs/index.md b/docs/changelogs/index.md
index 3240a41bc0d50..885fceb9d4e1b 100644
--- a/docs/changelogs/index.md
+++ b/docs/changelogs/index.md
@@ -1,6 +1,6 @@
# Changelogs
-These are the changelogs used by [generate_release_notes.sh]https://github.com/coder/coder/blob/main/scripts/release/generate_release_notes.sh) for a release.
+These are the changelogs used by [generate_release_notes.sh](https://github.com/coder/coder/blob/main/scripts/release/generate_release_notes.sh) for a release.
These changelogs are currently not kept in sync with GitHub releases. Use [GitHub releases](https://github.com/coder/coder/releases) for the latest information!
diff --git a/docs/changelogs/v0.25.0.md b/docs/changelogs/v0.25.0.md
index 9aa1f6526b25d..caf51f917e342 100644
--- a/docs/changelogs/v0.25.0.md
+++ b/docs/changelogs/v0.25.0.md
@@ -23,9 +23,11 @@
[--block-direct-connections](https://coder.com/docs/cli/server#--block-direct-connections)
(#7936)
- Search for workspaces based on last activity (#2658)
+
```text
last_seen_before:"2023-01-14T23:59:59Z" last_seen_after:"2023-01-08T00:00:00Z"
```
+
- Queue positions of pending workspace builds are shown in the dashboard (#8244)
- Enable Terraform debug mode via deployment configuration (#8260)
diff --git a/docs/changelogs/v0.27.0.md b/docs/changelogs/v0.27.0.md
index dd7a259df49ad..361ef96e32ae5 100644
--- a/docs/changelogs/v0.27.0.md
+++ b/docs/changelogs/v0.27.0.md
@@ -50,81 +50,12 @@ Agent logs can be pushed after a workspace has started (#8528)
### Documentation
-## Changelog
-
-### Breaking changes
-
-Agent logs can be pushed after a workspace has started (#8528)
-
-> ⚠️ **Warning:** You will need to
-> [update](https://coder.com/docs/install) your local Coder CLI v0.27
-> to connect via `coder ssh`.
-
-### Features
-
-- [Empeheral parameters](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/parameter#ephemeral)
- allow users to specify a value for a single build (#8415) (#8524)
- ![Ephemeral parameters](https://github.com/coder/coder/assets/22407953/89df0888-9abc-453a-ac54-f5d0e221b0b9)
- > Upgrade to Coder Terraform Provider v0.11.1 to use ephemeral parameters in
- > your templates
-- Create template, if it doesn't exist with `templates push --create` (#8454)
-- Workspaces now appear `unhealthy` in the dashboard and CLI if one or more
- agents do not exist (#8541) (#8548)
- ![Workspace health](https://github.com/coder/coder/assets/22407953/edbb1d70-61b5-4b45-bfe8-51abdab417cc)
-- Reverse port-forward with `coder ssh -R` (#8515)
-- Helm: custom command arguments in Helm chart (#8567)
-- Template version messages (#8435)
-
-- TTL and max TTL validation increased to 30 days (#8258)
-- [Self-hosted docs](https://coder.com/docs/install/offline#offline-docs):
- Host your own copy of Coder's documentation in your own environment (#8527)
- (#8601)
-- Add custom coder bin path for `config-ssh` (#8425)
-- Admins can create workspaces for other users via the CLI (#8481)
-- `coder_app` supports localhost apps running https (#8585)
-- Base container image contains [jq](https://github.com/coder/coder/pull/8563)
- for parsing mounted JSON secrets
-
-### Bug fixes
-
-- Check agent metadata every second instead of minute (#8614)
-- `coder stat` fixes
- - Read from alternate cgroup path (#8591)
- - Improve detection of container environment (#8643)
- - Unskip TestStatCPUCmd/JSON and explicitly set --host in test cmd invocation
- (#8558)
-- Avoid initial license reconfig if feature isn't enabled (#8586)
-- Audit log records delete workspace action properly (#8494)
-- Audit logs are properly paginated (#8513)
-- Fix bottom border on build logs (#8554)
-- Don't mark metadata with `interval: 0` as stale (#8627)
-- Add some missing workspace updates (#7790)
-
-### Documentation
-
-- Custom API use cases (custom agent logs, CI/CD pipelines) (#8445)
-- Docs on using remote Docker hosts (#8479)
-- Added kubernetes option to workspace proxies (#8533)
-
-Compare:
-[`v0.26.1...v0.26.2`](https://github.com/coder/coder/compare/v0.26.1...v0.27.0)
-
-## Container image
-
-- `docker pull ghcr.io/coder/coder:v0.26.2`
-
-## Install/upgrade
-
-Refer to our docs to [install](https://coder.com/docs/install) or
-[upgrade](https://coder.com/docs/admin/upgrade) Coder, or use a
-release asset below.
-
- Custom API use cases (custom agent logs, CI/CD pipelines) (#8445)
- Docs on using remote Docker hosts (#8479)
- Added kubernetes option to workspace proxies (#8533)
Compare:
-[`v0.26.1...v0.26.2`](https://github.com/coder/coder/compare/v0.26.1...v0.27.0)
+[`v0.26.2...v0.27.0`](https://github.com/coder/coder/compare/v0.26.2...v0.27.0)
## Container image
diff --git a/docs/changelogs/v2.0.0.md b/docs/changelogs/v2.0.0.md
index a02fb765f768a..f74beaf14143c 100644
--- a/docs/changelogs/v2.0.0.md
+++ b/docs/changelogs/v2.0.0.md
@@ -18,7 +18,7 @@ While Coder v1 is being sunset, we still wanted to avoid versioning conflicts.
What is not changing:
-- Our feature roadmap: See what we have planned at https://coder.com/roadmap
+- Our feature roadmap: See what we have planned at <https://coder.com/roadmap>
- Your upgrade path: You can safely upgrade from previous coder/coder releases
to v2.x releases!
- Our release cadence: We want features out as quickly as possible and feature
@@ -33,7 +33,7 @@ What is changing:
dashboard ]
Questions? Feel free to ask in [our Discord](https://discord.gg/coder) or email
-ben@coder.com!
+<ben@coder.com>!
## Changelog
diff --git a/docs/changelogs/v2.1.1.md b/docs/changelogs/v2.1.1.md
index e948046bcbf24..7a0d4d71bcfcc 100644
--- a/docs/changelogs/v2.1.1.md
+++ b/docs/changelogs/v2.1.1.md
@@ -7,7 +7,7 @@
![Last used](https://user-images.githubusercontent.com/22407953/262407146-06cded4e-684e-4cff-86b7-4388270e7d03.png)
> You can use `last_used_before` and `last_used_after` in the workspaces
> search with [RFC3339Nano](https://www.rfc-editor.org/rfc/rfc3339) datetime
-- Add `daily_cost`` to `coder ls` to show
+- Add `daily_cost` to `coder ls` to show
[quota](https://coder.com/docs/admin/quotas) consumption (#9200)
(@ammario)
- Added `coder_app` usage to template insights (#9138) (@mafredri)
diff --git a/docs/changelogs/v2.1.5.md b/docs/changelogs/v2.1.5.md
index f23eff4b67b25..1e440bd97e75a 100644
--- a/docs/changelogs/v2.1.5.md
+++ b/docs/changelogs/v2.1.5.md
@@ -17,11 +17,13 @@
[display apps](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/agent#nested-schema-for-display_apps)
in your template, such as VS Code (Insiders), web terminal, SSH, etc. (#9100)
(@sreya) To add VS Code insiders into your template, you can set:
+
```tf
display_apps {
vscode_insiders = true
}
```
+
![Add insiders](https://user-images.githubusercontent.com/4856196/263852602-94a5cb56-b7c3-48cb-928a-3b5e0f4e964b.png)
- Create a workspace from any template version (#9471) (@aslilac)
- Add DataDog Go tracer (#9411) (@ammario)
diff --git a/docs/changelogs/v2.5.0.md b/docs/changelogs/v2.5.0.md
index a31731b7e7cc4..c0e81dec99acb 100644
--- a/docs/changelogs/v2.5.0.md
+++ b/docs/changelogs/v2.5.0.md
@@ -92,7 +92,7 @@
### Documentation
- Align CODER_HTTP_ADDRESS with document (#10779) (@JounQin)
-- Migrate all deprecated `CODER_ADDRESS `to `CODER_HTTP_ADDRESS` (#10780) (@JounQin)
+- Migrate all deprecated `CODER_ADDRESS` to `CODER_HTTP_ADDRESS` (#10780) (@JounQin)
- Add documentation for template update policies (experimental) (#10804) (@sreya)
- Fix typo in additional-clusters.md (#10868) (@bpmct)
- Update FE guide (#10942) (@BrunoQuaresma)
diff --git a/docs/contributing/CODE_OF_CONDUCT.md b/docs/contributing/CODE_OF_CONDUCT.md
index 5e40eb816bc17..64fe6bfd8d4b6 100644
--- a/docs/contributing/CODE_OF_CONDUCT.md
+++ b/docs/contributing/CODE_OF_CONDUCT.md
@@ -55,7 +55,7 @@ further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at opensource@coder.com. All complaints
+reported by contacting the project team at <opensource@coder.com>. All complaints
will be reviewed and investigated and will result in a response that is deemed
necessary and appropriate to the circumstances. The project team is obligated to
maintain confidentiality with regard to the reporter of an incident. Further
@@ -69,9 +69,9 @@ members of the project's leadership.
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 1.4, available at
-https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+<https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
-https://www.contributor-covenant.org/faq
+<https://www.contributor-covenant.org/faq>
diff --git a/docs/contributing/SECURITY.md b/docs/contributing/SECURITY.md
index 35dc53efd6934..7344f126449fe 100644
--- a/docs/contributing/SECURITY.md
+++ b/docs/contributing/SECURITY.md
@@ -1,4 +1,4 @@
# Security Policy
If you find a vulnerability, **DO NOT FILE AN ISSUE**. Instead, send an email to
-security@coder.com.
+<security@coder.com>.
diff --git a/docs/contributing/documentation.md b/docs/contributing/documentation.md
index 0f4ba55877b9a..b5b1a392c6923 100644
--- a/docs/contributing/documentation.md
+++ b/docs/contributing/documentation.md
@@ -25,7 +25,7 @@ If you have questions that aren't explicitly covered by this guide, consult the
following third-party references:
| **Type of guidance** | **Third-party reference** |
-| -------------------- | -------------------------------------------------------------------------------------- |
+|----------------------|----------------------------------------------------------------------------------------|
| Spelling | [Merriam-Webster.com](https://www.merriam-webster.com/) |
| Style - nontechnical | [The Chicago Manual of Style](https://www.chicagomanualofstyle.org/home.html) |
| Style - technical | [Microsoft Writing Style Guide](https://docs.microsoft.com/en-us/style-guide/welcome/) |
diff --git a/docs/contributing/feature-stages.md b/docs/contributing/feature-stages.md
index 92d879de3ea90..97b8b020a4559 100644
--- a/docs/contributing/feature-stages.md
+++ b/docs/contributing/feature-stages.md
@@ -46,7 +46,7 @@ coder server --experiments=feature1,feature2
| Feature | Description | Available in |
-| --------------- | ------------------------------------------------------------------- | ------------ |
+|-----------------|---------------------------------------------------------------------|--------------|
| `notifications` | Sends notifications via SMTP and webhooks following certain events. | stable |
diff --git a/docs/contributing/frontend.md b/docs/contributing/frontend.md
index c9d972711bce3..fd9d7ff0a64fe 100644
--- a/docs/contributing/frontend.md
+++ b/docs/contributing/frontend.md
@@ -23,14 +23,16 @@ You can run the UI and access the Coder dashboard in two ways:
In both cases, you can access the dashboard on `http://localhost:8080`. If using
`./scripts/develop.sh` you can log in with the default credentials.
-> [!TIP]
->
-> **Default Credentials:** `admin@coder.com` and `SomeSecurePassword!`.
+
+
+**Default Credentials:** `admin@coder.com` and `SomeSecurePassword!`.
+
+
## Tech Stack Overview
-All our dependencies are described in `site/package.json` but the following are
-the most important:
+All our dependencies are described in `site/package.json`, but the following are
+the most important.
- [React](https://reactjs.org/) for the UI framework
- [Typescript](https://www.typescriptlang.org/) to keep our sanity
@@ -129,17 +131,17 @@ within the component's story.
```tsx
export const WithQuota: Story = {
- parameters: {
- queries: [
- {
- key: getWorkspaceQuotaQueryKey(MockUser.username),
- data: {
- credits_consumed: 2,
- budget: 40,
- },
- },
- ],
- },
+ parameters: {
+ queries: [
+ {
+ key: getWorkspaceQuotaQueryKey(MockUser.username),
+ data: {
+ credits_consumed: 2,
+ budget: 40,
+ },
+ },
+ ],
+ },
};
```
@@ -156,12 +158,12 @@ execution. Here's an illustrative example:"
```ts
export const getAgentListeningPorts = async (
- agentID: string,
+ agentID: string,
): Promise => {
- const response = await axiosInstance.get(
- `/api/v2/workspaceagents/${agentID}/listening-ports`,
- );
- return response.data;
+ const response = await axiosInstance.get(
+ `/api/v2/workspaceagents/${agentID}/listening-ports`,
+ );
+ return response.data;
};
```
@@ -170,10 +172,10 @@ as a single function.
```ts
export const updateWorkspaceVersion = async (
- workspace: TypesGen.Workspace,
+ workspace: TypesGen.Workspace,
): Promise => {
- const template = await getTemplate(workspace.template_id);
- return startWorkspace(workspace.id, template.active_version_id);
+ const template = await getTemplate(workspace.template_id);
+ return startWorkspace(workspace.id, template.active_version_id);
};
```
@@ -224,10 +226,10 @@ inside the component itself using MUI's `visuallyHidden` utility function.
import { visuallyHidden } from "@mui/utils";
;
```
@@ -332,8 +334,8 @@ One thing we figured out that was slowing down our tests was the use of `ByRole`
queries because of how it calculates the role attribute for every element on the
`screen`. You can read more about it on the links below:
-- https://stackoverflow.com/questions/69711888/react-testing-library-getbyrole-is-performing-extremely-slowly
-- https://github.com/testing-library/dom-testing-library/issues/552#issuecomment-625172052
+- <https://stackoverflow.com/questions/69711888/react-testing-library-getbyrole-is-performing-extremely-slowly>
+- <https://github.com/testing-library/dom-testing-library/issues/552#issuecomment-625172052>
Even with `ByRole` having performance issues we still want to use it but for
that, we have to scope the "querying" area by using the `within` command. So
diff --git a/docs/install/cloud/azure-vm.md b/docs/install/cloud/azure-vm.md
index 751d204b321b4..2ab41bc53a0b5 100644
--- a/docs/install/cloud/azure-vm.md
+++ b/docs/install/cloud/azure-vm.md
@@ -12,7 +12,7 @@ This guide assumes you have full administrator privileges on Azure.
From the Azure Portal, navigate to the Virtual Machines Dashboard. Click Create,
and select creating a new Azure Virtual machine .
-
+
This will bring you to the `Create a virtual machine` page. Select the
subscription group of your choice, or create one if necessary.
@@ -22,14 +22,14 @@ of your choice. Change the region to something more appropriate for your current
location. For this tutorial, we will use the base selection of the Ubuntu Gen2
Image and keep the rest of the base settings for this image the same.
-
+
-
+
Up next, under `Inbound port rules` modify the Select `inbound ports` to also
take in `HTTPS` and `HTTP`.
-
+
The set up for the image is complete at this stage. Click `Review and Create` -
review the information and click `Create`. A popup will appear asking you to
@@ -37,11 +37,11 @@ download the key pair for the server. Click
`Download private key and create resource` and place it into a folder of your
choice on your local system.
-
+
Click `Return to create a virtual machine`. Your VM will start up!
-
+
Click `Go to resource` in the virtual machine and copy the public IP address.
You will need it to SSH into the virtual machine via your local machine.
@@ -100,12 +100,12 @@ First, run `coder template init` to create your first template. You’ll be give
a list of possible templates to use. This tutorial will show you how to set up
your Coder instance to create a Linux based machine on Azure.
-
+
Press `enter` to select `Develop in Linux on Azure` template. This will return
the following:
-
+
To get started using the Azure template, install the Azure CLI by following the
instructions
diff --git a/docs/install/offline.md b/docs/install/offline.md
index 72db203d86dfb..6a41bd9437894 100644
--- a/docs/install/offline.md
+++ b/docs/install/offline.md
@@ -7,7 +7,7 @@ environments. However, some changes to your configuration are necessary.
> offline with Kubernetes or Docker.
| | Public deployments | Offline deployments |
-| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|--------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Terraform binary | By default, Coder downloads Terraform binary from [releases.hashicorp.com](https://releases.hashicorp.com) | Terraform binary must be included in `PATH` for the VM or container image. [Supported versions](https://github.com/coder/coder/blob/main/provisioner/terraform/install.go#L23-L24) |
| Terraform registry | Coder templates will attempt to download providers from [registry.terraform.io](https://registry.terraform.io) or [custom source addresses](https://developer.hashicorp.com/terraform/language/providers/requirements#source-addresses) specified in each template | [Custom source addresses](https://developer.hashicorp.com/terraform/language/providers/requirements#source-addresses) can be specified in each Coder template, or a custom registry/mirror can be used. More details below |
| STUN | By default, Coder uses Google's public STUN server for direct workspace connections | STUN can be safely [disabled](../reference/cli/server.md#--derp-server-stun-addresses) users can still connect via [relayed connections](../admin/networking/index.md#-geo-distribution). Alternatively, you can set a [custom DERP server](../reference/cli/server.md#--derp-server-stun-addresses) |
diff --git a/docs/install/openshift.md b/docs/install/openshift.md
index 88c117d5eef30..26bb99a7681e5 100644
--- a/docs/install/openshift.md
+++ b/docs/install/openshift.md
@@ -1,3 +1,5 @@
+# OpenShift
+
## Requirements
- OpenShift cluster running K8s 1.19+ (OpenShift 4.7+)
@@ -46,13 +48,13 @@ coder:
- For `runAsUser` / `runAsGroup`, you can retrieve the correct values for
project UID and project GID with the following command:
- ```console
- oc get project coder -o json | jq -r '.metadata.annotations'
- {
+ ```console
+ oc get project coder -o json | jq -r '.metadata.annotations'
+ {
"openshift.io/sa.scc.supplemental-groups": "1000680000/10000",
"openshift.io/sa.scc.uid-range": "1000680000/10000"
- }
- ```
+ }
+ ```
Alternatively, you can set these values to `null` to allow OpenShift to
automatically select the correct value for the project.
diff --git a/docs/install/other/index.md b/docs/install/other/index.md
index eabb6b2987fcc..3809d86812526 100644
--- a/docs/install/other/index.md
+++ b/docs/install/other/index.md
@@ -4,7 +4,7 @@ Coder has a number of alternate unofficial install methods. Contributions are
welcome!
| Platform Name | Status | Documentation |
-| --------------------------------------------------------------------------------- | ---------- | -------------------------------------------------------------------------------------------- |
+|-----------------------------------------------------------------------------------|------------|----------------------------------------------------------------------------------------------|
| AWS EC2 | Official | [Guide: AWS](../cloud/ec2.md) |
| Google Compute Engine | Official | [Guide: Google Compute Engine](../cloud/compute-engine.md) |
| Azure AKS | Unofficial | [GitHub: coder-aks](https://github.com/ericpaulsen/coder-aks) |
diff --git a/docs/install/releases.md b/docs/install/releases.md
index f63e747654c80..b640704ea5ee3 100644
--- a/docs/install/releases.md
+++ b/docs/install/releases.md
@@ -7,6 +7,8 @@ We recommend enterprise customers test the compatibility of new releases with
their infrastructure on a staging environment before upgrading a production
deployment.
+## Release channels
+
We support two release channels:
[mainline](https://github.com/coder/coder/releases/tag/v2.16.0) for the bleeding
edge version of Coder and
@@ -53,7 +55,7 @@ pages.
## Release schedule
| Release name | Release Date | Status |
-| ------------ | ------------------ | ---------------- |
+|--------------|--------------------|------------------|
| 2.12.x | June 04, 2024 | Not Supported |
| 2.13.x | July 02, 2024 | Not Supported |
| 2.14.x | August 06, 2024 | Not Supported |
@@ -61,7 +63,7 @@ pages.
| 2.16.x | October 01, 2024 | Security Support |
| 2.17.x | November 05, 2024 | Stable |
| 2.18.x | December 03, 2024 | Mainline |
-| 2.19.x | January 07, 2024 | Not Released |
+| 2.19.x | February 04, 2025 | Not Released |
> **Tip**: We publish a
> [`preview`](https://github.com/coder/coder/pkgs/container/coder-preview) image
diff --git a/docs/install/uninstall.md b/docs/install/uninstall.md
index 9c0982d5cbe1a..3538af0494669 100644
--- a/docs/install/uninstall.md
+++ b/docs/install/uninstall.md
@@ -1,15 +1,10 @@
+
# Uninstall
This article walks you through how to uninstall your Coder server.
To uninstall your Coder server, delete the following directories.
-## Cached Coder releases
-
-```shell
-rm -rf ~/.cache/coder
-```
-
## The Coder server binary and CLI
Status Code **200**
-| Name | Type | Required | Restrictions | Description |
-| ------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | -------- | ------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| `[array item]` | array | false | | |
-| `» active_user_count` | integer | false | | Active user count is set to -1 when loading. |
-| `» active_version_id` | string(uuid) | false | | |
-| `» activity_bump_ms` | integer | false | | |
-| `» allow_user_autostart` | boolean | false | | Allow user autostart and AllowUserAutostop are enterprise-only. Their values are only used if your license is entitled to use the advanced template scheduling feature. |
-| `» allow_user_autostop` | boolean | false | | |
-| `» allow_user_cancel_workspace_jobs` | boolean | false | | |
-| `» autostart_requirement` | [codersdk.TemplateAutostartRequirement](schemas.md#codersdktemplateautostartrequirement) | false | | |
-| `»» days_of_week` | array | false | | Days of week is a list of days of the week in which autostart is allowed to happen. If no days are specified, autostart is not allowed. |
-| `» autostop_requirement` | [codersdk.TemplateAutostopRequirement](schemas.md#codersdktemplateautostoprequirement) | false | | Autostop requirement and AutostartRequirement are enterprise features. Its value is only used if your license is entitled to use the advanced template scheduling feature. |
-| `»» days_of_week` | array | false | | Days of week is a list of days of the week on which restarts are required. Restarts happen within the user's quiet hours (in their configured timezone). If no days are specified, restarts are not required. Weekdays cannot be specified twice. |
-| Restarts will only happen on weekdays in this list on weeks which line up with Weeks. |
-| `»» weeks` | integer | false | | Weeks is the number of weeks between required restarts. Weeks are synced across all workspaces (and Coder deployments) using modulo math on a hardcoded epoch week of January 2nd, 2023 (the first Monday of 2023). Values of 0 or 1 indicate weekly restarts. Values of 2 indicate fortnightly restarts, etc. |
-| `» build_time_stats` | [codersdk.TemplateBuildTimeStats](schemas.md#codersdktemplatebuildtimestats) | false | | |
-| `»» [any property]` | [codersdk.TransitionStats](schemas.md#codersdktransitionstats) | false | | |
-| `»»» p50` | integer | false | | |
-| `»»» p95` | integer | false | | |
-| `» created_at` | string(date-time) | false | | |
-| `» created_by_id` | string(uuid) | false | | |
-| `» created_by_name` | string | false | | |
-| `» default_ttl_ms` | integer | false | | |
-| `» deprecated` | boolean | false | | |
-| `» deprecation_message` | string | false | | |
-| `» description` | string | false | | |
-| `» display_name` | string | false | | |
-| `» failure_ttl_ms` | integer | false | | Failure ttl ms TimeTilDormantMillis, and TimeTilDormantAutoDeleteMillis are enterprise-only. Their values are used if your license is entitled to use the advanced template scheduling feature. |
-| `» icon` | string | false | | |
-| `» id` | string(uuid) | false | | |
-| `» max_port_share_level` | [codersdk.WorkspaceAgentPortShareLevel](schemas.md#codersdkworkspaceagentportsharelevel) | false | | |
-| `» name` | string | false | | |
-| `» organization_display_name` | string | false | | |
-| `» organization_icon` | string | false | | |
-| `» organization_id` | string(uuid) | false | | |
-| `» organization_name` | string(url) | false | | |
-| `» provisioner` | string | false | | |
-| `» require_active_version` | boolean | false | | Require active version mandates that workspaces are built with the active template version. |
-| `» time_til_dormant_autodelete_ms` | integer | false | | |
-| `» time_til_dormant_ms` | integer | false | | |
-| `» updated_at` | string(date-time) | false | | |
+| Name | Type | Required | Restrictions | Description |
+|--------------------------------------|------------------------------------------------------------------------------------------|----------|--------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `[array item]` | array | false | | |
+| `» active_user_count` | integer | false | | Active user count is set to -1 when loading. |
+| `» active_version_id` | string(uuid) | false | | |
+| `» activity_bump_ms` | integer | false | | |
+| `» allow_user_autostart` | boolean | false | | Allow user autostart and AllowUserAutostop are enterprise-only. Their values are only used if your license is entitled to use the advanced template scheduling feature. |
+| `» allow_user_autostop` | boolean | false | | |
+| `» allow_user_cancel_workspace_jobs` | boolean | false | | |
+| `» autostart_requirement` | [codersdk.TemplateAutostartRequirement](schemas.md#codersdktemplateautostartrequirement) | false | | |
+| `»» days_of_week` | array | false | | Days of week is a list of days of the week in which autostart is allowed to happen. If no days are specified, autostart is not allowed. |
+| `» autostop_requirement` | [codersdk.TemplateAutostopRequirement](schemas.md#codersdktemplateautostoprequirement) | false | | Autostop requirement and AutostartRequirement are enterprise features. Its value is only used if your license is entitled to use the advanced template scheduling feature. |
+|`»» days_of_week`|array|false||Days of week is a list of days of the week on which restarts are required. Restarts happen within the user's quiet hours (in their configured timezone). If no days are specified, restarts are not required. Weekdays cannot be specified twice.
+Restarts will only happen on weekdays in this list on weeks which line up with Weeks.|
+|`»» weeks`|integer|false||Weeks is the number of weeks between required restarts. Weeks are synced across all workspaces (and Coder deployments) using modulo math on a hardcoded epoch week of January 2nd, 2023 (the first Monday of 2023). Values of 0 or 1 indicate weekly restarts. Values of 2 indicate fortnightly restarts, etc.|
+|`» build_time_stats`|[codersdk.TemplateBuildTimeStats](schemas.md#codersdktemplatebuildtimestats)|false|||
+|`»» [any property]`|[codersdk.TransitionStats](schemas.md#codersdktransitionstats)|false|||
+|`»»» p50`|integer|false|||
+|`»»» p95`|integer|false|||
+|`» created_at`|string(date-time)|false|||
+|`» created_by_id`|string(uuid)|false|||
+|`» created_by_name`|string|false|||
+|`» default_ttl_ms`|integer|false|||
+|`» deprecated`|boolean|false|||
+|`» deprecation_message`|string|false|||
+|`» description`|string|false|||
+|`» display_name`|string|false|||
+|`» failure_ttl_ms`|integer|false||Failure ttl ms TimeTilDormantMillis, and TimeTilDormantAutoDeleteMillis are enterprise-only. Their values are used if your license is entitled to use the advanced template scheduling feature.|
+|`» icon`|string|false|||
+|`» id`|string(uuid)|false|||
+|`» max_port_share_level`|[codersdk.WorkspaceAgentPortShareLevel](schemas.md#codersdkworkspaceagentportsharelevel)|false|||
+|`» name`|string|false|||
+|`» organization_display_name`|string|false|||
+|`» organization_icon`|string|false|||
+|`» organization_id`|string(uuid)|false|||
+|`» organization_name`|string(url)|false|||
+|`» provisioner`|string|false|||
+|`» require_active_version`|boolean|false||Require active version mandates that workspaces are built with the active template version.|
+|`» time_til_dormant_autodelete_ms`|integer|false|||
+|`» time_til_dormant_ms`|integer|false|||
+|`» updated_at`|string(date-time)|false|||
#### Enumerated Values
| Property | Value |
-| ---------------------- | --------------- |
+|------------------------|-----------------|
| `max_port_share_level` | `owner` |
| `max_port_share_level` | `authenticated` |
| `max_port_share_level` | `public` |
@@ -796,22 +824,24 @@ curl -X GET http://coder-server:8080/api/v2/templates/examples \
```json
[
- {
- "description": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "markdown": "string",
- "name": "string",
- "tags": ["string"],
- "url": "string"
- }
+ {
+ "description": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "markdown": "string",
+ "name": "string",
+ "tags": [
+ "string"
+ ],
+ "url": "string"
+ }
]
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ----------------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|-------------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.TemplateExample](schemas.md#codersdktemplateexample) |
@@ -541,7 +551,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/autofill-parameters?tem
Status Code **200**
| Name | Type | Required | Restrictions | Description |
-| -------------- | ------ | -------- | ------------ | ----------- |
+|----------------|--------|----------|--------------|-------------|
| `[array item]` | array | false | | |
| `» name` | string | false | | |
| `» value` | string | false | | |
@@ -564,7 +574,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/gitsshkey \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -573,17 +583,17 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/gitsshkey \
```json
{
- "created_at": "2019-08-24T14:15:22Z",
- "public_key": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
+ "created_at": "2019-08-24T14:15:22Z",
+ "public_key": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.GitSSHKey](schemas.md#codersdkgitsshkey) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -604,7 +614,7 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/gitsshkey \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -613,17 +623,17 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/gitsshkey \
```json
{
- "created_at": "2019-08-24T14:15:22Z",
- "public_key": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
+ "created_at": "2019-08-24T14:15:22Z",
+ "public_key": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.GitSSHKey](schemas.md#codersdkgitsshkey) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -644,7 +654,7 @@ curl -X POST http://coder-server:8080/api/v2/users/{user}/keys \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -653,14 +663,14 @@ curl -X POST http://coder-server:8080/api/v2/users/{user}/keys \
```json
{
- "key": "string"
+ "key": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------------ | ----------- | ---------------------------------------------------------------------------- |
+|--------|--------------------------------------------------------------|-------------|------------------------------------------------------------------------------|
| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.GenerateAPIKeyResponse](schemas.md#codersdkgenerateapikeyresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -681,7 +691,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -690,25 +700,25 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens \
```json
[
- {
- "created_at": "2019-08-24T14:15:22Z",
- "expires_at": "2019-08-24T14:15:22Z",
- "id": "string",
- "last_used": "2019-08-24T14:15:22Z",
- "lifetime_seconds": 0,
- "login_type": "password",
- "scope": "all",
- "token_name": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
- }
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "expires_at": "2019-08-24T14:15:22Z",
+ "id": "string",
+ "last_used": "2019-08-24T14:15:22Z",
+ "lifetime_seconds": 0,
+ "login_type": "password",
+ "scope": "all",
+ "token_name": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
+ }
]
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ----------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|-------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.APIKey](schemas.md#codersdkapikey) |
Response Schema
@@ -716,7 +726,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens \
Status Code **200**
| Name | Type | Required | Restrictions | Description |
-| -------------------- | ------------------------------------------------------ | -------- | ------------ | ----------- |
+|----------------------|--------------------------------------------------------|----------|--------------|-------------|
| `[array item]` | array | false | | |
| `» created_at` | string(date-time) | true | | |
| `» expires_at` | string(date-time) | true | | |
@@ -732,7 +742,7 @@ Status Code **200**
#### Enumerated Values
| Property | Value |
-| ------------ | --------------------- |
+|--------------|-----------------------|
| `login_type` | `password` |
| `login_type` | `github` |
| `login_type` | `oidc` |
@@ -760,16 +770,16 @@ curl -X POST http://coder-server:8080/api/v2/users/{user}/keys/tokens \
```json
{
- "lifetime": 0,
- "scope": "all",
- "token_name": "string"
+ "lifetime": 0,
+ "scope": "all",
+ "token_name": "string"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | -------------------------------------------------------------------- | -------- | -------------------- |
+|--------|------|----------------------------------------------------------------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `body` | body | [codersdk.CreateTokenRequest](schemas.md#codersdkcreatetokenrequest) | true | Create token request |
@@ -779,14 +789,14 @@ curl -X POST http://coder-server:8080/api/v2/users/{user}/keys/tokens \
```json
{
- "key": "string"
+ "key": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------------ | ----------- | ---------------------------------------------------------------------------- |
+|--------|--------------------------------------------------------------|-------------|------------------------------------------------------------------------------|
| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.GenerateAPIKeyResponse](schemas.md#codersdkgenerateapikeyresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -807,7 +817,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens/{keyname} \
### Parameters
| Name | In | Type | Required | Description |
-| --------- | ---- | -------------- | -------- | -------------------- |
+|-----------|------|----------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `keyname` | path | string(string) | true | Key Name |
@@ -817,23 +827,23 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/tokens/{keyname} \
```json
{
- "created_at": "2019-08-24T14:15:22Z",
- "expires_at": "2019-08-24T14:15:22Z",
- "id": "string",
- "last_used": "2019-08-24T14:15:22Z",
- "lifetime_seconds": 0,
- "login_type": "password",
- "scope": "all",
- "token_name": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
+ "created_at": "2019-08-24T14:15:22Z",
+ "expires_at": "2019-08-24T14:15:22Z",
+ "id": "string",
+ "last_used": "2019-08-24T14:15:22Z",
+ "lifetime_seconds": 0,
+ "login_type": "password",
+ "scope": "all",
+ "token_name": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.APIKey](schemas.md#codersdkapikey) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -854,7 +864,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \
### Parameters
| Name | In | Type | Required | Description |
-| ------- | ---- | ------------ | -------- | -------------------- |
+|---------|------|--------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `keyid` | path | string(uuid) | true | Key ID |
@@ -864,23 +874,23 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \
```json
{
- "created_at": "2019-08-24T14:15:22Z",
- "expires_at": "2019-08-24T14:15:22Z",
- "id": "string",
- "last_used": "2019-08-24T14:15:22Z",
- "lifetime_seconds": 0,
- "login_type": "password",
- "scope": "all",
- "token_name": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
+ "created_at": "2019-08-24T14:15:22Z",
+ "expires_at": "2019-08-24T14:15:22Z",
+ "id": "string",
+ "last_used": "2019-08-24T14:15:22Z",
+ "lifetime_seconds": 0,
+ "login_type": "password",
+ "scope": "all",
+ "token_name": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.APIKey](schemas.md#codersdkapikey) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -900,14 +910,14 @@ curl -X DELETE http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \
### Parameters
| Name | In | Type | Required | Description |
-| ------- | ---- | ------------ | -------- | -------------------- |
+|---------|------|--------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `keyid` | path | string(uuid) | true | Key ID |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -928,7 +938,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/login-type \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -937,14 +947,14 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/login-type \
```json
{
- "login_type": ""
+ "login_type": ""
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserLoginType](schemas.md#codersdkuserlogintype) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -965,7 +975,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/organizations \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -974,23 +984,23 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/organizations \
```json
[
- {
- "created_at": "2019-08-24T14:15:22Z",
- "description": "string",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "is_default": true,
- "name": "string",
- "updated_at": "2019-08-24T14:15:22Z"
- }
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "description": "string",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "is_default": true,
+ "name": "string",
+ "updated_at": "2019-08-24T14:15:22Z"
+ }
]
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ----------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|-------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.Organization](schemas.md#codersdkorganization) |
Response Schema
@@ -998,7 +1008,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/organizations \
Status Code **200**
| Name | Type | Required | Restrictions | Description |
-| ---------------- | ----------------- | -------- | ------------ | ----------- |
+|------------------|-------------------|----------|--------------|-------------|
| `[array item]` | array | false | | |
| `» created_at` | string(date-time) | true | | |
| `» description` | string | false | | |
@@ -1027,7 +1037,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/organizations/{organiza
### Parameters
| Name | In | Type | Required | Description |
-| ------------------ | ---- | ------ | -------- | -------------------- |
+|--------------------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `organizationname` | path | string | true | Organization name |
@@ -1037,21 +1047,21 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/organizations/{organiza
```json
{
- "created_at": "2019-08-24T14:15:22Z",
- "description": "string",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "is_default": true,
- "name": "string",
- "updated_at": "2019-08-24T14:15:22Z"
+ "created_at": "2019-08-24T14:15:22Z",
+ "description": "string",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "is_default": true,
+ "name": "string",
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Organization](schemas.md#codersdkorganization) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1073,22 +1083,22 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/password \
```json
{
- "old_password": "string",
- "password": "string"
+ "old_password": "string",
+ "password": "string"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ---------------------------------------------------------------------------------- | -------- | ----------------------- |
+|--------|------|------------------------------------------------------------------------------------|----------|-------------------------|
| `user` | path | string | true | User ID, name, or me |
| `body` | body | [codersdk.UpdateUserPasswordRequest](schemas.md#codersdkupdateuserpasswordrequest) | true | Update password request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1111,15 +1121,15 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/profile \
```json
{
- "name": "string",
- "username": "string"
+ "name": "string",
+ "username": "string"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | -------------------------------------------------------------------------------- | -------- | -------------------- |
+|--------|------|----------------------------------------------------------------------------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `body` | body | [codersdk.UpdateUserProfileRequest](schemas.md#codersdkupdateuserprofilerequest) | true | Updated profile |
@@ -1129,32 +1139,34 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/profile \
```json
{
- "avatar_url": "http://example.com",
- "created_at": "2019-08-24T14:15:22Z",
- "email": "user@example.com",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_seen_at": "2019-08-24T14:15:22Z",
- "login_type": "",
- "name": "string",
- "organization_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "roles": [
- {
- "display_name": "string",
- "name": "string",
- "organization_id": "string"
- }
- ],
- "status": "active",
- "theme_preference": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "username": "string"
+ "avatar_url": "http://example.com",
+ "created_at": "2019-08-24T14:15:22Z",
+ "email": "user@example.com",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_seen_at": "2019-08-24T14:15:22Z",
+ "login_type": "",
+ "name": "string",
+ "organization_ids": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "roles": [
+ {
+ "display_name": "string",
+ "name": "string",
+ "organization_id": "string"
+ }
+ ],
+ "status": "active",
+ "theme_preference": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "username": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.User](schemas.md#codersdkuser) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1175,7 +1187,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/roles \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -1184,32 +1196,34 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/roles \
```json
{
- "avatar_url": "http://example.com",
- "created_at": "2019-08-24T14:15:22Z",
- "email": "user@example.com",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_seen_at": "2019-08-24T14:15:22Z",
- "login_type": "",
- "name": "string",
- "organization_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "roles": [
- {
- "display_name": "string",
- "name": "string",
- "organization_id": "string"
- }
- ],
- "status": "active",
- "theme_preference": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "username": "string"
+ "avatar_url": "http://example.com",
+ "created_at": "2019-08-24T14:15:22Z",
+ "email": "user@example.com",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_seen_at": "2019-08-24T14:15:22Z",
+ "login_type": "",
+ "name": "string",
+ "organization_ids": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "roles": [
+ {
+ "display_name": "string",
+ "name": "string",
+ "organization_id": "string"
+ }
+ ],
+ "status": "active",
+ "theme_preference": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "username": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.User](schemas.md#codersdkuser) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1232,14 +1246,16 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/roles \
```json
{
- "roles": ["string"]
+ "roles": [
+ "string"
+ ]
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------------------------------------------------------ | -------- | -------------------- |
+|--------|------|--------------------------------------------------------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
| `body` | body | [codersdk.UpdateRoles](schemas.md#codersdkupdateroles) | true | Update roles request |
@@ -1249,32 +1265,34 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/roles \
```json
{
- "avatar_url": "http://example.com",
- "created_at": "2019-08-24T14:15:22Z",
- "email": "user@example.com",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_seen_at": "2019-08-24T14:15:22Z",
- "login_type": "",
- "name": "string",
- "organization_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "roles": [
- {
- "display_name": "string",
- "name": "string",
- "organization_id": "string"
- }
- ],
- "status": "active",
- "theme_preference": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "username": "string"
+ "avatar_url": "http://example.com",
+ "created_at": "2019-08-24T14:15:22Z",
+ "email": "user@example.com",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_seen_at": "2019-08-24T14:15:22Z",
+ "login_type": "",
+ "name": "string",
+ "organization_ids": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "roles": [
+ {
+ "display_name": "string",
+ "name": "string",
+ "organization_id": "string"
+ }
+ ],
+ "status": "active",
+ "theme_preference": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "username": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.User](schemas.md#codersdkuser) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1295,7 +1313,7 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/status/activate \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -1304,32 +1322,34 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/status/activate \
```json
{
- "avatar_url": "http://example.com",
- "created_at": "2019-08-24T14:15:22Z",
- "email": "user@example.com",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_seen_at": "2019-08-24T14:15:22Z",
- "login_type": "",
- "name": "string",
- "organization_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "roles": [
- {
- "display_name": "string",
- "name": "string",
- "organization_id": "string"
- }
- ],
- "status": "active",
- "theme_preference": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "username": "string"
+ "avatar_url": "http://example.com",
+ "created_at": "2019-08-24T14:15:22Z",
+ "email": "user@example.com",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_seen_at": "2019-08-24T14:15:22Z",
+ "login_type": "",
+ "name": "string",
+ "organization_ids": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "roles": [
+ {
+ "display_name": "string",
+ "name": "string",
+ "organization_id": "string"
+ }
+ ],
+ "status": "active",
+ "theme_preference": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "username": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.User](schemas.md#codersdkuser) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1350,7 +1370,7 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/status/suspend \
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ------ | -------- | -------------------- |
+|--------|------|--------|----------|----------------------|
| `user` | path | string | true | User ID, name, or me |
### Example responses
@@ -1359,32 +1379,34 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/status/suspend \
```json
{
- "avatar_url": "http://example.com",
- "created_at": "2019-08-24T14:15:22Z",
- "email": "user@example.com",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_seen_at": "2019-08-24T14:15:22Z",
- "login_type": "",
- "name": "string",
- "organization_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "roles": [
- {
- "display_name": "string",
- "name": "string",
- "organization_id": "string"
- }
- ],
- "status": "active",
- "theme_preference": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "username": "string"
+ "avatar_url": "http://example.com",
+ "created_at": "2019-08-24T14:15:22Z",
+ "email": "user@example.com",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_seen_at": "2019-08-24T14:15:22Z",
+ "login_type": "",
+ "name": "string",
+ "organization_ids": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "roles": [
+ {
+ "display_name": "string",
+ "name": "string",
+ "organization_id": "string"
+ }
+ ],
+ "status": "active",
+ "theme_preference": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "username": "string"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.User](schemas.md#codersdkuser) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
diff --git a/docs/reference/api/workspaceproxies.md b/docs/reference/api/workspaceproxies.md
index 35e9e6d84ed0b..72527b7e305e4 100644
--- a/docs/reference/api/workspaceproxies.md
+++ b/docs/reference/api/workspaceproxies.md
@@ -19,24 +19,24 @@ curl -X GET http://coder-server:8080/api/v2/regions \
```json
{
- "regions": [
- {
- "display_name": "string",
- "healthy": true,
- "icon_url": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "name": "string",
- "path_app_url": "string",
- "wildcard_hostname": "string"
- }
- ]
+ "regions": [
+ {
+ "display_name": "string",
+ "healthy": true,
+ "icon_url": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "name": "string",
+ "path_app_url": "string",
+ "wildcard_hostname": "string"
+ }
+ ]
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.RegionsResponse-codersdk_Region](schemas.md#codersdkregionsresponse-codersdk_region) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md
index b855615336c45..2413bb294a5f6 100644
--- a/docs/reference/api/workspaces.md
+++ b/docs/reference/api/workspaces.md
@@ -23,25 +23,25 @@ of the template will be used.
```json
{
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "name": "string",
- "rich_parameter_values": [
- {
- "name": "string",
- "value": "string"
- }
- ],
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "ttl_ms": 0
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "name": "string",
+ "rich_parameter_values": [
+ {
+ "name": "string",
+ "value": "string"
+ }
+ ],
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "ttl_ms": 0
}
```
### Parameters
| Name | In | Type | Required | Description |
-| -------------- | ---- | ---------------------------------------------------------------------------- | -------- | ------------------------ |
+|----------------|------|------------------------------------------------------------------------------|----------|--------------------------|
| `organization` | path | string(uuid) | true | Organization ID |
| `user` | path | string | true | Username, UUID, or me |
| `body` | body | [codersdk.CreateWorkspaceRequest](schemas.md#codersdkcreateworkspacerequest) | true | Create workspace request |
@@ -52,199 +52,206 @@ of the template will be used.
```json
{
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {
- "interval": 0,
- "threshold": 0,
- "url": "string"
- },
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {
+ "interval": 0,
+ "threshold": 0,
+ "url": "string"
+ },
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Workspace](schemas.md#codersdkworkspace) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -265,7 +272,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
### Parameters
| Name | In | Type | Required | Description |
-| ----------------- | ----- | ------- | -------- | ----------------------------------------------------------- |
+|-------------------|-------|---------|----------|-------------------------------------------------------------|
| `user` | path | string | true | User ID, name, or me |
| `workspacename` | path | string | true | Workspace name |
| `include_deleted` | query | boolean | false | Return data instead of HTTP 404 if the workspace is deleted |
@@ -276,199 +283,206 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
```json
{
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {
- "interval": 0,
- "threshold": 0,
- "url": "string"
- },
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {
+ "interval": 0,
+ "threshold": 0,
+ "url": "string"
+ },
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Workspace](schemas.md#codersdkworkspace) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -496,25 +510,25 @@ of the template will be used.
```json
{
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "name": "string",
- "rich_parameter_values": [
- {
- "name": "string",
- "value": "string"
- }
- ],
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "ttl_ms": 0
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "name": "string",
+ "rich_parameter_values": [
+ {
+ "name": "string",
+ "value": "string"
+ }
+ ],
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "ttl_ms": 0
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ------ | ---- | ---------------------------------------------------------------------------- | -------- | ------------------------ |
+|--------|------|------------------------------------------------------------------------------|----------|--------------------------|
| `user` | path | string | true | Username, UUID, or me |
| `body` | body | [codersdk.CreateWorkspaceRequest](schemas.md#codersdkcreateworkspacerequest) | true | Create workspace request |
@@ -524,199 +538,206 @@ of the template will be used.
```json
{
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {
- "interval": 0,
- "threshold": 0,
- "url": "string"
- },
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {
+ "interval": 0,
+ "threshold": 0,
+ "url": "string"
+ },
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Workspace](schemas.md#codersdkworkspace) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -737,7 +758,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
### Parameters
| Name | In | Type | Required | Description |
-| -------- | ----- | ------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------- |
+|----------|-------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `q` | query | string | false | Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before. |
| `limit` | query | integer | false | Page limit |
| `offset` | query | integer | false | Page offset |
@@ -748,200 +769,207 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
```json
{
- "count": 0,
- "workspaces": [
- {
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {},
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
- }
- ]
+ "count": 0,
+ "workspaces": [
+ {
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {},
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
+ }
+ ]
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.WorkspacesResponse](schemas.md#codersdkworkspacesresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -962,7 +990,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
### Parameters
| Name | In | Type | Required | Description |
-| ----------------- | ----- | ------------ | -------- | ----------------------------------------------------------- |
+|-------------------|-------|--------------|----------|-------------------------------------------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `include_deleted` | query | boolean | false | Return data instead of HTTP 404 if the workspace is deleted |
@@ -972,199 +1000,206 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
```json
{
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {
- "interval": 0,
- "threshold": 0,
- "url": "string"
- },
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {
+ "interval": 0,
+ "threshold": 0,
+ "url": "string"
+ },
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Workspace](schemas.md#codersdkworkspace) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1186,21 +1221,21 @@ curl -X PATCH http://coder-server:8080/api/v2/workspaces/{workspace} \
```json
{
- "name": "string"
+ "name": "string"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ---------------------------------------------------------------------------- | -------- | ----------------------- |
+|-------------|------|------------------------------------------------------------------------------|----------|-------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.UpdateWorkspaceRequest](schemas.md#codersdkupdateworkspacerequest) | true | Metadata update request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1222,21 +1257,21 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/autostart \
```json
{
- "schedule": "string"
+ "schedule": "string"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ---------------------------------------------------------------------------------------------- | -------- | ----------------------- |
+|-------------|------|------------------------------------------------------------------------------------------------|----------|-------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.UpdateWorkspaceAutostartRequest](schemas.md#codersdkupdateworkspaceautostartrequest) | true | Schedule update request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1258,26 +1293,26 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/autoupdates \
```json
{
- "automatic_updates": "always"
+ "automatic_updates": "always"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------------------------------------------------------------------------------------------------------ | -------- | ------------------------- |
+|-------------|------|--------------------------------------------------------------------------------------------------------------|----------|---------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.UpdateWorkspaceAutomaticUpdatesRequest](schemas.md#codersdkupdateworkspaceautomaticupdatesrequest) | true | Automatic updates request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
-## Update workspace dormancy status by id.
+## Update workspace dormancy status by ID
### Code samples
@@ -1295,14 +1330,14 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
```json
{
- "dormant": true
+ "dormant": true
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------------------------------------------------------------------------ | -------- | ---------------------------------- |
+|-------------|------|--------------------------------------------------------------------------------|----------|------------------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.UpdateWorkspaceDormancy](schemas.md#codersdkupdateworkspacedormancy) | true | Make a workspace dormant or active |
@@ -1312,199 +1347,206 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
```json
{
- "allow_renames": true,
- "automatic_updates": "always",
- "autostart_schedule": "string",
- "created_at": "2019-08-24T14:15:22Z",
- "deleting_at": "2019-08-24T14:15:22Z",
- "dormant_at": "2019-08-24T14:15:22Z",
- "favorite": true,
- "health": {
- "failing_agents": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"],
- "healthy": false
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "last_used_at": "2019-08-24T14:15:22Z",
- "latest_build": {
- "build_number": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "deadline": "2019-08-24T14:15:22Z",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
- "initiator_name": "string",
- "job": {
- "canceled_at": "2019-08-24T14:15:22Z",
- "completed_at": "2019-08-24T14:15:22Z",
- "created_at": "2019-08-24T14:15:22Z",
- "error": "string",
- "error_code": "REQUIRED_TEMPLATE_VARIABLES",
- "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "queue_position": 0,
- "queue_size": 0,
- "started_at": "2019-08-24T14:15:22Z",
- "status": "pending",
- "tags": {
- "property1": "string",
- "property2": "string"
- },
- "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
- },
- "matched_provisioners": {
- "available": 0,
- "count": 0,
- "most_recently_seen": "2019-08-24T14:15:22Z"
- },
- "max_deadline": "2019-08-24T14:15:22Z",
- "reason": "initiator",
- "resources": [
- {
- "agents": [
- {
- "api_version": "string",
- "apps": [
- {
- "command": "string",
- "display_name": "string",
- "external": true,
- "health": "disabled",
- "healthcheck": {
- "interval": 0,
- "threshold": 0,
- "url": "string"
- },
- "hidden": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "sharing_level": "owner",
- "slug": "string",
- "subdomain": true,
- "subdomain_name": "string",
- "url": "string"
- }
- ],
- "architecture": "string",
- "connection_timeout_seconds": 0,
- "created_at": "2019-08-24T14:15:22Z",
- "directory": "string",
- "disconnected_at": "2019-08-24T14:15:22Z",
- "display_apps": ["vscode"],
- "environment_variables": {
- "property1": "string",
- "property2": "string"
- },
- "expanded_directory": "string",
- "first_connected_at": "2019-08-24T14:15:22Z",
- "health": {
- "healthy": false,
- "reason": "agent has lost connection"
- },
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "instance_id": "string",
- "last_connected_at": "2019-08-24T14:15:22Z",
- "latency": {
- "property1": {
- "latency_ms": 0,
- "preferred": true
- },
- "property2": {
- "latency_ms": 0,
- "preferred": true
- }
- },
- "lifecycle_state": "created",
- "log_sources": [
- {
- "created_at": "2019-08-24T14:15:22Z",
- "display_name": "string",
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
- }
- ],
- "logs_length": 0,
- "logs_overflowed": true,
- "name": "string",
- "operating_system": "string",
- "ready_at": "2019-08-24T14:15:22Z",
- "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
- "scripts": [
- {
- "cron": "string",
- "display_name": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "log_path": "string",
- "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
- "run_on_start": true,
- "run_on_stop": true,
- "script": "string",
- "start_blocks_login": true,
- "timeout": 0
- }
- ],
- "started_at": "2019-08-24T14:15:22Z",
- "startup_script_behavior": "blocking",
- "status": "connecting",
- "subsystems": ["envbox"],
- "troubleshooting_url": "string",
- "updated_at": "2019-08-24T14:15:22Z",
- "version": "string"
- }
- ],
- "created_at": "2019-08-24T14:15:22Z",
- "daily_cost": 0,
- "hide": true,
- "icon": "string",
- "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "metadata": [
- {
- "key": "string",
- "sensitive": true,
- "value": "string"
- }
- ],
- "name": "string",
- "type": "string",
- "workspace_transition": "start"
- }
- ],
- "status": "pending",
- "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
- "template_version_name": "string",
- "transition": "start",
- "updated_at": "2019-08-24T14:15:22Z",
- "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
- "workspace_name": "string",
- "workspace_owner_avatar_url": "string",
- "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
- "workspace_owner_name": "string"
- },
- "name": "string",
- "next_start_at": "2019-08-24T14:15:22Z",
- "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
- "organization_name": "string",
- "outdated": true,
- "owner_avatar_url": "string",
- "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
- "owner_name": "string",
- "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
- "template_allow_user_cancel_workspace_jobs": true,
- "template_display_name": "string",
- "template_icon": "string",
- "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
- "template_name": "string",
- "template_require_active_version": true,
- "ttl_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "allow_renames": true,
+ "automatic_updates": "always",
+ "autostart_schedule": "string",
+ "created_at": "2019-08-24T14:15:22Z",
+ "deleting_at": "2019-08-24T14:15:22Z",
+ "dormant_at": "2019-08-24T14:15:22Z",
+ "favorite": true,
+ "health": {
+ "failing_agents": [
+ "497f6eca-6276-4993-bfeb-53cbbbba6f08"
+ ],
+ "healthy": false
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "last_used_at": "2019-08-24T14:15:22Z",
+ "latest_build": {
+ "build_number": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "deadline": "2019-08-24T14:15:22Z",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3",
+ "initiator_name": "string",
+ "job": {
+ "canceled_at": "2019-08-24T14:15:22Z",
+ "completed_at": "2019-08-24T14:15:22Z",
+ "created_at": "2019-08-24T14:15:22Z",
+ "error": "string",
+ "error_code": "REQUIRED_TEMPLATE_VARIABLES",
+ "file_id": "8a0cfb4f-ddc9-436d-91bb-75133c583767",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "queue_position": 0,
+ "queue_size": 0,
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "pending",
+ "tags": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
+ },
+ "matched_provisioners": {
+ "available": 0,
+ "count": 0,
+ "most_recently_seen": "2019-08-24T14:15:22Z"
+ },
+ "max_deadline": "2019-08-24T14:15:22Z",
+ "reason": "initiator",
+ "resources": [
+ {
+ "agents": [
+ {
+ "api_version": "string",
+ "apps": [
+ {
+ "command": "string",
+ "display_name": "string",
+ "external": true,
+ "health": "disabled",
+ "healthcheck": {
+ "interval": 0,
+ "threshold": 0,
+ "url": "string"
+ },
+ "hidden": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "open_in": "slim-window",
+ "sharing_level": "owner",
+ "slug": "string",
+ "subdomain": true,
+ "subdomain_name": "string",
+ "url": "string"
+ }
+ ],
+ "architecture": "string",
+ "connection_timeout_seconds": 0,
+ "created_at": "2019-08-24T14:15:22Z",
+ "directory": "string",
+ "disconnected_at": "2019-08-24T14:15:22Z",
+ "display_apps": [
+ "vscode"
+ ],
+ "environment_variables": {
+ "property1": "string",
+ "property2": "string"
+ },
+ "expanded_directory": "string",
+ "first_connected_at": "2019-08-24T14:15:22Z",
+ "health": {
+ "healthy": false,
+ "reason": "agent has lost connection"
+ },
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "instance_id": "string",
+ "last_connected_at": "2019-08-24T14:15:22Z",
+ "latency": {
+ "property1": {
+ "latency_ms": 0,
+ "preferred": true
+ },
+ "property2": {
+ "latency_ms": 0,
+ "preferred": true
+ }
+ },
+ "lifecycle_state": "created",
+ "log_sources": [
+ {
+ "created_at": "2019-08-24T14:15:22Z",
+ "display_name": "string",
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "workspace_agent_id": "7ad2e618-fea7-4c1a-b70a-f501566a72f1"
+ }
+ ],
+ "logs_length": 0,
+ "logs_overflowed": true,
+ "name": "string",
+ "operating_system": "string",
+ "ready_at": "2019-08-24T14:15:22Z",
+ "resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
+ "scripts": [
+ {
+ "cron": "string",
+ "display_name": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "log_path": "string",
+ "log_source_id": "4197ab25-95cf-4b91-9c78-f7f2af5d353a",
+ "run_on_start": true,
+ "run_on_stop": true,
+ "script": "string",
+ "start_blocks_login": true,
+ "timeout": 0
+ }
+ ],
+ "started_at": "2019-08-24T14:15:22Z",
+ "startup_script_behavior": "blocking",
+ "status": "connecting",
+ "subsystems": [
+ "envbox"
+ ],
+ "troubleshooting_url": "string",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "version": "string"
+ }
+ ],
+ "created_at": "2019-08-24T14:15:22Z",
+ "daily_cost": 0,
+ "hide": true,
+ "icon": "string",
+ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "metadata": [
+ {
+ "key": "string",
+ "sensitive": true,
+ "value": "string"
+ }
+ ],
+ "name": "string",
+ "type": "string",
+ "workspace_transition": "start"
+ }
+ ],
+ "status": "pending",
+ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1",
+ "template_version_name": "string",
+ "transition": "start",
+ "updated_at": "2019-08-24T14:15:22Z",
+ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
+ "workspace_name": "string",
+ "workspace_owner_avatar_url": "string",
+ "workspace_owner_id": "e7078695-5279-4c86-8774-3ac2367a2fc7",
+ "workspace_owner_name": "string"
+ },
+ "name": "string",
+ "next_start_at": "2019-08-24T14:15:22Z",
+ "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
+ "organization_name": "string",
+ "outdated": true,
+ "owner_avatar_url": "string",
+ "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05",
+ "owner_name": "string",
+ "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c",
+ "template_allow_user_cancel_workspace_jobs": true,
+ "template_display_name": "string",
+ "template_icon": "string",
+ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
+ "template_name": "string",
+ "template_require_active_version": true,
+ "ttl_ms": 0,
+ "updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Workspace](schemas.md#codersdkworkspace) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1527,14 +1569,14 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/extend \
```json
{
- "deadline": "2019-08-24T14:15:22Z"
+ "deadline": "2019-08-24T14:15:22Z"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ---------------------------------------------------------------------------------- | -------- | ------------------------------ |
+|-------------|------|------------------------------------------------------------------------------------|----------|--------------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.PutExtendWorkspaceRequest](schemas.md#codersdkputextendworkspacerequest) | true | Extend deadline update request |
@@ -1544,26 +1586,26 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/extend \
```json
{
- "detail": "string",
- "message": "string",
- "validations": [
- {
- "detail": "string",
- "field": "string"
- }
- ]
+ "detail": "string",
+ "message": "string",
+ "validations": [
+ {
+ "detail": "string",
+ "field": "string"
+ }
+ ]
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ------------------------------------------------ |
+|--------|---------------------------------------------------------|-------------|--------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Response](schemas.md#codersdkresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
-## Favorite workspace by ID.
+## Favorite workspace by ID
### Code samples
@@ -1578,18 +1620,18 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/favorite \
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------ | -------- | ------------ |
+|-------------|------|--------------|----------|--------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
-## Unfavorite workspace by ID.
+## Unfavorite workspace by ID
### Code samples
@@ -1604,18 +1646,18 @@ curl -X DELETE http://coder-server:8080/api/v2/workspaces/{workspace}/favorite \
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------ | -------- | ------------ |
+|-------------|------|--------------|----------|--------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
-## Resolve workspace autostart by id.
+## Resolve workspace autostart by ID
### Code samples
@@ -1631,7 +1673,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/resolve-autos
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------ | -------- | ------------ |
+|-------------|------|--------------|----------|--------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
### Example responses
@@ -1640,14 +1682,14 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/resolve-autos
```json
{
- "parameter_mismatch": true
+ "parameter_mismatch": true
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.ResolveAutostartResponse](schemas.md#codersdkresolveautostartresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1668,7 +1710,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/timings \
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------ | -------- | ------------ |
+|-------------|------|--------------|----------|--------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
### Example responses
@@ -1677,45 +1719,45 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/timings \
```json
{
- "agent_connection_timings": [
- {
- "ended_at": "2019-08-24T14:15:22Z",
- "stage": "init",
- "started_at": "2019-08-24T14:15:22Z",
- "workspace_agent_id": "string",
- "workspace_agent_name": "string"
- }
- ],
- "agent_script_timings": [
- {
- "display_name": "string",
- "ended_at": "2019-08-24T14:15:22Z",
- "exit_code": 0,
- "stage": "init",
- "started_at": "2019-08-24T14:15:22Z",
- "status": "string",
- "workspace_agent_id": "string",
- "workspace_agent_name": "string"
- }
- ],
- "provisioner_timings": [
- {
- "action": "string",
- "ended_at": "2019-08-24T14:15:22Z",
- "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
- "resource": "string",
- "source": "string",
- "stage": "init",
- "started_at": "2019-08-24T14:15:22Z"
- }
- ]
+ "agent_connection_timings": [
+ {
+ "ended_at": "2019-08-24T14:15:22Z",
+ "stage": "init",
+ "started_at": "2019-08-24T14:15:22Z",
+ "workspace_agent_id": "string",
+ "workspace_agent_name": "string"
+ }
+ ],
+ "agent_script_timings": [
+ {
+ "display_name": "string",
+ "ended_at": "2019-08-24T14:15:22Z",
+ "exit_code": 0,
+ "stage": "init",
+ "started_at": "2019-08-24T14:15:22Z",
+ "status": "string",
+ "workspace_agent_id": "string",
+ "workspace_agent_name": "string"
+ }
+ ],
+ "provisioner_timings": [
+ {
+ "action": "string",
+ "ended_at": "2019-08-24T14:15:22Z",
+ "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f",
+ "resource": "string",
+ "source": "string",
+ "stage": "init",
+ "started_at": "2019-08-24T14:15:22Z"
+ }
+ ]
}
```
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------------- |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.WorkspaceBuildTimings](schemas.md#codersdkworkspacebuildtimings) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1737,21 +1779,21 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/ttl \
```json
{
- "ttl_ms": 0
+ "ttl_ms": 0
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ---------------------------------------------------------------------------------- | -------- | ---------------------------- |
+|-------------|------|------------------------------------------------------------------------------------|----------|------------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.UpdateWorkspaceTTLRequest](schemas.md#codersdkupdateworkspacettlrequest) | true | Workspace TTL update request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1773,22 +1815,22 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/usage \
```json
{
- "agent_id": "2b1e3b65-2c04-4fa2-a2d7-467901e98978",
- "app_name": "vscode"
+ "agent_id": "2b1e3b65-2c04-4fa2-a2d7-467901e98978",
+ "app_name": "vscode"
}
```
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ---------------------------------------------------------------------------------- | -------- | ---------------------------- |
+|-------------|------|------------------------------------------------------------------------------------|----------|------------------------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
| `body` | body | [codersdk.PostWorkspaceUsageRequest](schemas.md#codersdkpostworkspaceusagerequest) | false | Post workspace usage request |
### Responses
| Status | Meaning | Description | Schema |
-| ------ | --------------------------------------------------------------- | ----------- | ------ |
+|--------|-----------------------------------------------------------------|-------------|--------|
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
@@ -1809,7 +1851,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/watch \
### Parameters
| Name | In | Type | Required | Description |
-| ----------- | ---- | ------------ | -------- | ------------ |
+|-------------|------|--------------|----------|--------------|
| `workspace` | path | string(uuid) | true | Workspace ID |
### Example responses
@@ -1819,7 +1861,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/watch \
### Responses
| Status | Meaning | Description | Schema |
-| ------ | ------------------------------------------------------- | ----------- | ------------------------------------------------ |
+|--------|---------------------------------------------------------|-------------|--------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Response](schemas.md#codersdkresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
diff --git a/docs/reference/cli/autoupdate.md b/docs/reference/cli/autoupdate.md
index 12751dfd291a5..a025616e76031 100644
--- a/docs/reference/cli/autoupdate.md
+++ b/docs/reference/cli/autoupdate.md
@@ -1,5 +1,4 @@
-
# autoupdate
Toggle auto-update policy for a workspace
@@ -15,7 +14,7 @@ coder autoupdate [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/completion.md b/docs/reference/cli/completion.md
index 45e8ab77b741d..1d14fc2aa2467 100644
--- a/docs/reference/cli/completion.md
+++ b/docs/reference/cli/completion.md
@@ -1,5 +1,4 @@
-
# completion
Install or update shell completion scripts for the detected or chosen shell.
@@ -15,7 +14,7 @@ coder completion [flags]
### -s, --shell
| | |
-| ---- | ---------------------------------------- |
+|------|------------------------------------------|
| Type | bash\|fish\|zsh\|powershell |
The shell to install completion for.
@@ -23,7 +22,7 @@ The shell to install completion for.
### -p, --print
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Print the completion script instead of installing it.
diff --git a/docs/reference/cli/config-ssh.md b/docs/reference/cli/config-ssh.md
index ef1c75e56ec61..937bcd061bd05 100644
--- a/docs/reference/cli/config-ssh.md
+++ b/docs/reference/cli/config-ssh.md
@@ -1,5 +1,4 @@
-
# config-ssh
Add an SSH Host entry for your workspaces "ssh coder.workspace"
@@ -28,7 +27,7 @@ workspaces:
### --ssh-config-file
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_SSH_CONFIG_FILE |
| Default | ~/.ssh/config |
@@ -38,7 +37,7 @@ Specifies the path to an SSH config.
### --coder-binary-path
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string |
| Environment | $CODER_SSH_CONFIG_BINARY_PATH |
@@ -47,7 +46,7 @@ Optionally specify the absolute path to the coder binary used in ProxyCommand. B
### -o, --ssh-option
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string-array |
| Environment | $CODER_SSH_CONFIG_OPTS |
@@ -56,7 +55,7 @@ Specifies additional SSH options to embed in each host stanza.
### -n, --dry-run
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | bool |
| Environment | $CODER_SSH_DRY_RUN |
@@ -65,7 +64,7 @@ Perform a trial run with no changes made, showing a diff at the end.
### --use-previous-options
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | bool |
| Environment | $CODER_SSH_USE_PREVIOUS_OPTIONS |
@@ -74,7 +73,7 @@ Specifies whether or not to keep options from previous run of config-ssh.
### --ssh-host-prefix
| | |
-| ----------- | --------------------------------------------- |
+|-------------|-----------------------------------------------|
| Type | string |
| Environment | $CODER_CONFIGSSH_SSH_HOST_PREFIX |
@@ -83,7 +82,7 @@ Override the default host prefix.
### --wait
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | yes\|no\|auto |
| Environment | $CODER_CONFIGSSH_WAIT |
| Default | auto |
@@ -93,7 +92,7 @@ Specifies whether or not to wait for the startup script to finish executing. Aut
### --disable-autostart
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | bool |
| Environment | $CODER_CONFIGSSH_DISABLE_AUTOSTART |
| Default | false |
@@ -103,7 +102,7 @@ Disable starting the workspace automatically when connecting via SSH.
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/create.md b/docs/reference/cli/create.md
index 377dda14189e4..58c0fad4a14e8 100644
--- a/docs/reference/cli/create.md
+++ b/docs/reference/cli/create.md
@@ -1,5 +1,4 @@
-
# create
Create a workspace
@@ -23,7 +22,7 @@ coder create [flags] [workspace]
### -t, --template
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | string |
| Environment | $CODER_TEMPLATE_NAME |
@@ -32,7 +31,7 @@ Specify a template name.
### --template-version
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string |
| Environment | $CODER_TEMPLATE_VERSION |
@@ -41,7 +40,7 @@ Specify a template version name.
### --start-at
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_WORKSPACE_START_AT |
@@ -50,7 +49,7 @@ Specify the workspace autostart schedule. Check coder schedule start --help for
### --stop-after
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | duration |
| Environment | $CODER_WORKSPACE_STOP_AFTER |
@@ -59,7 +58,7 @@ Specify a duration after which the workspace should shut down (e.g. 8h).
### --automatic-updates
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | string |
| Environment | $CODER_WORKSPACE_AUTOMATIC_UPDATES |
| Default | never |
@@ -69,7 +68,7 @@ Specify automatic updates setting for the workspace (accepts 'always' or 'never'
### --copy-parameters-from
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | string |
| Environment | $CODER_WORKSPACE_COPY_PARAMETERS_FROM |
@@ -78,7 +77,7 @@ Specify the source workspace name to copy parameters from.
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -86,7 +85,7 @@ Bypass prompts.
### --parameter
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER |
@@ -95,7 +94,7 @@ Rich parameter value in the format "name=value".
### --rich-parameter-file
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_RICH_PARAMETER_FILE |
@@ -104,7 +103,7 @@ Specify a file path with values for rich parameters defined in the template. The
### --parameter-default
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER_DEFAULT |
@@ -113,7 +112,7 @@ Rich parameter default values in the format "name=value".
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/delete.md b/docs/reference/cli/delete.md
index b5093efa63f0a..9dc2ea6fa9a19 100644
--- a/docs/reference/cli/delete.md
+++ b/docs/reference/cli/delete.md
@@ -1,12 +1,11 @@
-
# delete
Delete a workspace
Aliases:
-- rm
+* rm
## Usage
@@ -27,7 +26,7 @@ coder delete [flags]
### --orphan
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Delete a workspace without deleting its resources. This can delete a workspace in a broken state, but may also lead to unaccounted cloud resources.
@@ -35,7 +34,7 @@ Delete a workspace without deleting its resources. This can delete a workspace i
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/dotfiles.md b/docs/reference/cli/dotfiles.md
index 709aab6dd70b0..57074497fee5f 100644
--- a/docs/reference/cli/dotfiles.md
+++ b/docs/reference/cli/dotfiles.md
@@ -1,5 +1,4 @@
-
# dotfiles
Personalize your workspace by applying a canonical dotfiles repository
@@ -23,7 +22,7 @@ coder dotfiles [flags]
### --symlink-dir
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | string |
| Environment | $CODER_SYMLINK_DIR |
@@ -32,7 +31,7 @@ Specifies the directory for the dotfiles symlink destinations. If empty, will us
### -b, --branch
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies which branch to clone. If empty, will default to cloning the default branch or using the existing branch in the cloned repo on disk.
@@ -40,7 +39,7 @@ Specifies which branch to clone. If empty, will default to cloning the default b
### --repo-dir
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_DOTFILES_REPO_DIR |
| Default | dotfiles |
@@ -50,7 +49,7 @@ Specifies the directory for the dotfiles repository, relative to global config d
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/external-auth.md b/docs/reference/cli/external-auth.md
index ebe16435feb62..5347bfd34e1ac 100644
--- a/docs/reference/cli/external-auth.md
+++ b/docs/reference/cli/external-auth.md
@@ -1,5 +1,4 @@
-
# external-auth
Manage external authentication
@@ -19,5 +18,5 @@ Authenticate with external services inside of a workspace.
## Subcommands
| Name | Purpose |
-| ------------------------------------------------------------ | ----------------------------------- |
+|--------------------------------------------------------------|-------------------------------------|
| [access-token](./external-auth_access-token.md) | Print auth for an external provider |
diff --git a/docs/reference/cli/external-auth_access-token.md b/docs/reference/cli/external-auth_access-token.md
index ead28af54be31..2303e8f076da8 100644
--- a/docs/reference/cli/external-auth_access-token.md
+++ b/docs/reference/cli/external-auth_access-token.md
@@ -1,5 +1,4 @@
-
# external-auth access-token
Print auth for an external provider
@@ -37,7 +36,7 @@ fi
### --extra
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Extract a field from the "extra" properties of the OAuth token.
diff --git a/docs/reference/cli/favorite.md b/docs/reference/cli/favorite.md
index 93b5027367020..97ff6fde44032 100644
--- a/docs/reference/cli/favorite.md
+++ b/docs/reference/cli/favorite.md
@@ -1,13 +1,12 @@
-
# favorite
Add a workspace to your favorites
Aliases:
-- fav
-- favourite
+* fav
+* favourite
## Usage
diff --git a/docs/reference/cli/features.md b/docs/reference/cli/features.md
index d367623f049a0..1ba187f964c8e 100644
--- a/docs/reference/cli/features.md
+++ b/docs/reference/cli/features.md
@@ -1,12 +1,11 @@
-
# features
List Enterprise features
Aliases:
-- feature
+* feature
## Usage
@@ -17,5 +16,5 @@ coder features
## Subcommands
| Name | Purpose |
-| --------------------------------------- | ------- |
+|-----------------------------------------|---------|
| [list](./features_list.md) | |
diff --git a/docs/reference/cli/features_list.md b/docs/reference/cli/features_list.md
index 43795aea2874b..a1aab1d165ae6 100644
--- a/docs/reference/cli/features_list.md
+++ b/docs/reference/cli/features_list.md
@@ -1,10 +1,9 @@
-
# features list
Aliases:
-- ls
+* ls
## Usage
@@ -17,7 +16,7 @@ coder features list [flags]
### -c, --column
| | |
-| ------- | -------------------------------------------------------- |
+|---------|----------------------------------------------------------|
| Type | [name\|entitlement\|enabled\|limit\|actual] |
| Default | name,entitlement,enabled,limit,actual |
@@ -26,7 +25,7 @@ Specify columns to filter in the table.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/groups.md b/docs/reference/cli/groups.md
index 6d5c936e7f0c5..a036d646ab263 100644
--- a/docs/reference/cli/groups.md
+++ b/docs/reference/cli/groups.md
@@ -1,12 +1,11 @@
-
# groups
Manage groups
Aliases:
-- group
+* group
## Usage
@@ -17,7 +16,7 @@ coder groups
## Subcommands
| Name | Purpose |
-| ----------------------------------------- | ------------------- |
+|-------------------------------------------|---------------------|
| [create](./groups_create.md) | Create a user group |
| [list](./groups_list.md) | List user groups |
| [edit](./groups_edit.md) | Edit a user group |
diff --git a/docs/reference/cli/groups_create.md b/docs/reference/cli/groups_create.md
index e758b422ea387..4274a681a5873 100644
--- a/docs/reference/cli/groups_create.md
+++ b/docs/reference/cli/groups_create.md
@@ -1,5 +1,4 @@
-
# groups create
Create a user group
@@ -15,7 +14,7 @@ coder groups create [flags]
### -u, --avatar-url
| | |
-| ----------- | ------------------------------ |
+|-------------|--------------------------------|
| Type | string |
| Environment | $CODER_AVATAR_URL |
@@ -24,7 +23,7 @@ Set an avatar for a group.
### --display-name
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_DISPLAY_NAME |
@@ -33,7 +32,7 @@ Optional human friendly name for the group.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/groups_delete.md b/docs/reference/cli/groups_delete.md
index 7bbf215ae2f29..2135fb635cb8a 100644
--- a/docs/reference/cli/groups_delete.md
+++ b/docs/reference/cli/groups_delete.md
@@ -1,12 +1,11 @@
-
# groups delete
Delete a user group
Aliases:
-- rm
+* rm
## Usage
@@ -19,7 +18,7 @@ coder groups delete [flags]
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/groups_edit.md b/docs/reference/cli/groups_edit.md
index f7c39c58e1d24..356a7eea4e7a9 100644
--- a/docs/reference/cli/groups_edit.md
+++ b/docs/reference/cli/groups_edit.md
@@ -1,5 +1,4 @@
-
# groups edit
Edit a user group
@@ -15,7 +14,7 @@ coder groups edit [flags]
### -n, --name
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Update the group name.
@@ -23,7 +22,7 @@ Update the group name.
### -u, --avatar-url
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Update the group avatar.
@@ -31,7 +30,7 @@ Update the group avatar.
### --display-name
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_DISPLAY_NAME |
@@ -40,7 +39,7 @@ Optional human friendly name for the group.
### -a, --add-users
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Add users to the group. Accepts emails or IDs.
@@ -48,7 +47,7 @@ Add users to the group. Accepts emails or IDs.
### -r, --rm-users
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Remove users to the group. Accepts emails or IDs.
@@ -56,7 +55,7 @@ Remove users to the group. Accepts emails or IDs.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/groups_list.md b/docs/reference/cli/groups_list.md
index f3ab2f5e0956e..c76e8b382ec44 100644
--- a/docs/reference/cli/groups_list.md
+++ b/docs/reference/cli/groups_list.md
@@ -1,5 +1,4 @@
-
# groups list
List user groups
@@ -15,7 +14,7 @@ coder groups list [flags]
### -c, --column
| | |
-| ------- | ----------------------------------------------------------------------- |
+|---------|-------------------------------------------------------------------------|
| Type | [name\|display name\|organization id\|members\|avatar url] |
| Default | name,display name,organization id,members,avatar url |
@@ -24,7 +23,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
@@ -33,7 +32,7 @@ Output format.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/index.md b/docs/reference/cli/index.md
index 525cb8ac7d183..71a1a2b4e2c68 100644
--- a/docs/reference/cli/index.md
+++ b/docs/reference/cli/index.md
@@ -1,5 +1,4 @@
-
# coder
## Usage
@@ -24,7 +23,7 @@ Coder — A tool for provisioning self-hosted development environments with Terr
## Subcommands
| Name | Purpose |
-| -------------------------------------------------- | ----------------------------------------------------------------------------------------------------- |
+|----------------------------------------------------|-------------------------------------------------------------------------------------------------------|
| [completion](./completion.md) | Install or update shell completion scripts for the detected or chosen shell. |
| [dotfiles](./dotfiles.md) | Personalize your workspace by applying a canonical dotfiles repository |
| [external-auth](./external-auth.md) | Manage external authentication |
@@ -73,7 +72,7 @@ Coder — A tool for provisioning self-hosted development environments with Terr
### --url
| | |
-| ----------- | ----------------------- |
+|-------------|-------------------------|
| Type | url |
| Environment | $CODER_URL |
@@ -82,7 +81,7 @@ URL to a deployment.
### --debug-options
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Print all options, how they're set, then exit.
@@ -90,7 +89,7 @@ Print all options, how they're set, then exit.
### --token
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | string |
| Environment | $CODER_SESSION_TOKEN |
@@ -99,7 +98,7 @@ Specify an authentication token. For security reasons setting CODER_SESSION_TOKE
### --no-version-warning
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | bool |
| Environment | $CODER_NO_VERSION_WARNING |
@@ -108,7 +107,7 @@ Suppress warning when client and server versions do not match.
### --no-feature-warning
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | bool |
| Environment | $CODER_NO_FEATURE_WARNING |
@@ -117,7 +116,7 @@ Suppress warnings about unlicensed features.
### --header
| | |
-| ----------- | -------------------------- |
+|-------------|----------------------------|
| Type | string-array |
| Environment | $CODER_HEADER |
@@ -126,7 +125,7 @@ Additional HTTP headers added to all requests. Provide as key=value. Can be spec
### --header-command
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string |
| Environment | $CODER_HEADER_COMMAND |
@@ -135,7 +134,7 @@ An external command that outputs additional HTTP headers added to all requests.
### -v, --verbose
| | |
-| ----------- | --------------------------- |
+|-------------|-----------------------------|
| Type | bool |
| Environment | $CODER_VERBOSE |
@@ -144,7 +143,7 @@ Enable verbose output.
### --disable-direct-connections
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_DIRECT_CONNECTIONS |
@@ -153,7 +152,7 @@ Disable direct (P2P) connections to workspaces.
### --disable-network-telemetry
| | |
-| ----------- | --------------------------------------------- |
+|-------------|-----------------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_NETWORK_TELEMETRY |
@@ -162,7 +161,7 @@ Disable network telemetry. Network telemetry is collected when connecting to wor
### --global-config
| | |
-| ----------- | ------------------------------ |
+|-------------|--------------------------------|
| Type | string |
| Environment | $CODER_CONFIG_DIR |
| Default | ~/.config/coderv2 |
diff --git a/docs/reference/cli/licenses.md b/docs/reference/cli/licenses.md
index 63e337afb259d..8e71f01aba8c6 100644
--- a/docs/reference/cli/licenses.md
+++ b/docs/reference/cli/licenses.md
@@ -1,12 +1,11 @@
-
# licenses
Add, delete, and list licenses
Aliases:
-- license
+* license
## Usage
@@ -17,7 +16,7 @@ coder licenses
## Subcommands
| Name | Purpose |
-| ------------------------------------------- | --------------------------------- |
+|---------------------------------------------|-----------------------------------|
| [add](./licenses_add.md) | Add license to Coder deployment |
| [list](./licenses_list.md) | List licenses (including expired) |
| [delete](./licenses_delete.md) | Delete license by ID |
diff --git a/docs/reference/cli/licenses_add.md b/docs/reference/cli/licenses_add.md
index f3d9f201ed099..5562f5f49b365 100644
--- a/docs/reference/cli/licenses_add.md
+++ b/docs/reference/cli/licenses_add.md
@@ -1,5 +1,4 @@
-
# licenses add
Add license to Coder deployment
@@ -15,7 +14,7 @@ coder licenses add [flags] [-f file | -l license]
### -f, --file
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Load license from file.
@@ -23,7 +22,7 @@ Load license from file.
### -l, --license
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
License string.
@@ -31,7 +30,7 @@ License string.
### --debug
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output license claims for debugging.
diff --git a/docs/reference/cli/licenses_delete.md b/docs/reference/cli/licenses_delete.md
index 8cf95894d5815..9a24e520e6584 100644
--- a/docs/reference/cli/licenses_delete.md
+++ b/docs/reference/cli/licenses_delete.md
@@ -1,13 +1,12 @@
-
# licenses delete
Delete license by ID
Aliases:
-- del
-- rm
+* del
+* rm
## Usage
diff --git a/docs/reference/cli/licenses_list.md b/docs/reference/cli/licenses_list.md
index a888c44331546..17311df2d6da2 100644
--- a/docs/reference/cli/licenses_list.md
+++ b/docs/reference/cli/licenses_list.md
@@ -1,12 +1,11 @@
-
# licenses list
List licenses (including expired)
Aliases:
-- ls
+* ls
## Usage
@@ -19,7 +18,7 @@ coder licenses list [flags]
### -c, --column
| | |
-| ------- | ----------------------------------------------------------------- |
+|---------|-------------------------------------------------------------------|
| Type | [id\|uuid\|uploaded at\|features\|expires at\|trial] |
| Default | ID,UUID,Expires At,Uploaded At,Features |
@@ -28,7 +27,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/list.md b/docs/reference/cli/list.md
index e9e82988c0af8..5911785b87fc1 100644
--- a/docs/reference/cli/list.md
+++ b/docs/reference/cli/list.md
@@ -1,12 +1,11 @@
-
# list
List workspaces
Aliases:
-- ls
+* ls
## Usage
@@ -19,7 +18,7 @@ coder list [flags]
### -a, --all
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Specifies whether all workspaces will be listed or not.
@@ -27,7 +26,7 @@ Specifies whether all workspaces will be listed or not.
### --search
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | string |
| Default | owner:me |
@@ -36,7 +35,7 @@ Search for a workspace with a query.
### -c, --column
| | |
-| ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|---------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Type | [favorite\|workspace\|organization id\|organization name\|template\|status\|healthy\|last built\|current version\|outdated\|starts at\|starts next\|stops after\|stops next\|daily cost] |
| Default | workspace,template,status,healthy,last built,current version,outdated,starts at,stops after |
@@ -45,7 +44,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/login.md b/docs/reference/cli/login.md
index 9a27e4a6357c8..a35038fedef8c 100644
--- a/docs/reference/cli/login.md
+++ b/docs/reference/cli/login.md
@@ -1,5 +1,4 @@
-
# login
Authenticate with Coder deployment
@@ -15,7 +14,7 @@ coder login [flags] []
### --first-user-email
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string |
| Environment | $CODER_FIRST_USER_EMAIL |
@@ -24,7 +23,7 @@ Specifies an email address to use if creating the first user for the deployment.
### --first-user-username
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_FIRST_USER_USERNAME |
@@ -33,7 +32,7 @@ Specifies a username to use if creating the first user for the deployment.
### --first-user-full-name
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_FIRST_USER_FULL_NAME |
@@ -42,7 +41,7 @@ Specifies a human-readable name for the first user of the deployment.
### --first-user-password
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_FIRST_USER_PASSWORD |
@@ -51,7 +50,7 @@ Specifies a password to use if creating the first user for the deployment.
### --first-user-trial
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | bool |
| Environment | $CODER_FIRST_USER_TRIAL |
@@ -60,7 +59,7 @@ Specifies whether a trial license should be provisioned for the Coder deployment
### --use-token-as-session
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
By default, the CLI will generate a new session token when logging in. This flag will instead use the provided token as the session token.
diff --git a/docs/reference/cli/logout.md b/docs/reference/cli/logout.md
index 255c474054243..b35369ee36448 100644
--- a/docs/reference/cli/logout.md
+++ b/docs/reference/cli/logout.md
@@ -1,5 +1,4 @@
-
# logout
Unauthenticate your local session
@@ -15,7 +14,7 @@ coder logout [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/netcheck.md b/docs/reference/cli/netcheck.md
index 0d70bc3a76642..219f6fa16b762 100644
--- a/docs/reference/cli/netcheck.md
+++ b/docs/reference/cli/netcheck.md
@@ -1,5 +1,4 @@
-
# netcheck
Print network debug information for DERP and STUN
diff --git a/docs/reference/cli/notifications.md b/docs/reference/cli/notifications.md
index 59e74b4324357..169776876e315 100644
--- a/docs/reference/cli/notifications.md
+++ b/docs/reference/cli/notifications.md
@@ -1,12 +1,11 @@
-
# notifications
Manage Coder notifications
Aliases:
-- notification
+* notification
## Usage
@@ -32,6 +31,6 @@ server or Webhook not responding).:
## Subcommands
| Name | Purpose |
-| ------------------------------------------------ | -------------------- |
+|--------------------------------------------------|----------------------|
| [pause](./notifications_pause.md) | Pause notifications |
| [resume](./notifications_resume.md) | Resume notifications |
diff --git a/docs/reference/cli/notifications_pause.md b/docs/reference/cli/notifications_pause.md
index 0cb2b101d474c..5bac0c2f9e05b 100644
--- a/docs/reference/cli/notifications_pause.md
+++ b/docs/reference/cli/notifications_pause.md
@@ -1,5 +1,4 @@
-
# notifications pause
Pause notifications
diff --git a/docs/reference/cli/notifications_resume.md b/docs/reference/cli/notifications_resume.md
index a8dc17453a383..79ec60ba543ff 100644
--- a/docs/reference/cli/notifications_resume.md
+++ b/docs/reference/cli/notifications_resume.md
@@ -1,5 +1,4 @@
-
# notifications resume
Resume notifications
diff --git a/docs/reference/cli/open.md b/docs/reference/cli/open.md
index 8b5f5beef4c03..e19bdaeba884d 100644
--- a/docs/reference/cli/open.md
+++ b/docs/reference/cli/open.md
@@ -1,5 +1,4 @@
-
# open
Open a workspace
@@ -13,5 +12,5 @@ coder open
## Subcommands
| Name | Purpose |
-| --------------------------------------- | ----------------------------------- |
+|-----------------------------------------|-------------------------------------|
| [vscode](./open_vscode.md) | Open a workspace in VS Code Desktop |
diff --git a/docs/reference/cli/open_vscode.md b/docs/reference/cli/open_vscode.md
index 23e4d85d604b6..2b1e80dfbe5b7 100644
--- a/docs/reference/cli/open_vscode.md
+++ b/docs/reference/cli/open_vscode.md
@@ -1,5 +1,4 @@
-
# open vscode
Open a workspace in VS Code Desktop
@@ -15,7 +14,7 @@ coder open vscode [flags] []
### --generate-token
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_OPEN_VSCODE_GENERATE_TOKEN |
diff --git a/docs/reference/cli/organizations.md b/docs/reference/cli/organizations.md
index 1fbd076425ace..c2d4497173103 100644
--- a/docs/reference/cli/organizations.md
+++ b/docs/reference/cli/organizations.md
@@ -1,14 +1,13 @@
-
# organizations
Organization related commands
Aliases:
-- organization
-- org
-- orgs
+* organization
+* org
+* orgs
## Usage
@@ -19,7 +18,7 @@ coder organizations [flags] [subcommand]
## Subcommands
| Name | Purpose |
-| ---------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------|
| [show](./organizations_show.md) | Show the organization. Using "selected" will show the selected organization from the "--org" flag. Using "me" will show all organizations you are a member of. |
| [create](./organizations_create.md) | Create a new organization. |
| [members](./organizations_members.md) | Manage organization members |
@@ -31,7 +30,7 @@ coder organizations [flags] [subcommand]
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/organizations_create.md b/docs/reference/cli/organizations_create.md
index 416a1306456e2..14f40f55e00d1 100644
--- a/docs/reference/cli/organizations_create.md
+++ b/docs/reference/cli/organizations_create.md
@@ -1,5 +1,4 @@
-
# organizations create
Create a new organization.
@@ -15,7 +14,7 @@ coder organizations create [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/organizations_members.md b/docs/reference/cli/organizations_members.md
index 49d29ace004a8..b71372f13bdd9 100644
--- a/docs/reference/cli/organizations_members.md
+++ b/docs/reference/cli/organizations_members.md
@@ -1,12 +1,11 @@
-
# organizations members
Manage organization members
Aliases:
-- member
+* member
## Usage
@@ -17,7 +16,7 @@ coder organizations members
## Subcommands
| Name | Purpose |
-| ---------------------------------------------------------------- | ----------------------------------------------- |
+|------------------------------------------------------------------|-------------------------------------------------|
| [list](./organizations_members_list.md) | List all organization members |
| [edit-roles](./organizations_members_edit-roles.md) | Edit organization member's roles |
| [add](./organizations_members_add.md) | Add a new member to the current organization |
diff --git a/docs/reference/cli/organizations_members_add.md b/docs/reference/cli/organizations_members_add.md
index b912a7ab56545..57481f02dd859 100644
--- a/docs/reference/cli/organizations_members_add.md
+++ b/docs/reference/cli/organizations_members_add.md
@@ -1,5 +1,4 @@
-
# organizations members add
Add a new member to the current organization
diff --git a/docs/reference/cli/organizations_members_edit-roles.md b/docs/reference/cli/organizations_members_edit-roles.md
index 3bd9d2066f5cf..0d4a21a379e11 100644
--- a/docs/reference/cli/organizations_members_edit-roles.md
+++ b/docs/reference/cli/organizations_members_edit-roles.md
@@ -1,12 +1,11 @@
-
# organizations members edit-roles
Edit organization member's roles
Aliases:
-- edit-role
+* edit-role
## Usage
diff --git a/docs/reference/cli/organizations_members_list.md b/docs/reference/cli/organizations_members_list.md
index 9a0a5d3fa0640..270fb1d49e945 100644
--- a/docs/reference/cli/organizations_members_list.md
+++ b/docs/reference/cli/organizations_members_list.md
@@ -1,5 +1,4 @@
-
# organizations members list
List all organization members
@@ -15,7 +14,7 @@ coder organizations members list [flags]
### -c, --column
| | |
-| ------- | --------------------------------------------------------------------------------------------------- |
+|---------|-----------------------------------------------------------------------------------------------------|
| Type | [username\|name\|user id\|organization id\|created at\|updated at\|organization roles] |
| Default | username,organization roles |
@@ -24,7 +23,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/organizations_members_remove.md b/docs/reference/cli/organizations_members_remove.md
index f36ea00b3ed48..9b6e29416557b 100644
--- a/docs/reference/cli/organizations_members_remove.md
+++ b/docs/reference/cli/organizations_members_remove.md
@@ -1,12 +1,11 @@
-
# organizations members remove
Remove a new member to the current organization
Aliases:
-- rm
+* rm
## Usage
diff --git a/docs/reference/cli/organizations_roles.md b/docs/reference/cli/organizations_roles.md
index 536e6abe89c10..19b6271dcbf9c 100644
--- a/docs/reference/cli/organizations_roles.md
+++ b/docs/reference/cli/organizations_roles.md
@@ -1,12 +1,11 @@
-
# organizations roles
Manage organization roles.
Aliases:
-- role
+* role
## Usage
@@ -17,6 +16,6 @@ coder organizations roles
## Subcommands
| Name | Purpose |
-| -------------------------------------------------- | -------------------------------- |
+|----------------------------------------------------|----------------------------------|
| [show](./organizations_roles_show.md) | Show role(s) |
| [edit](./organizations_roles_edit.md) | Edit an organization custom role |
diff --git a/docs/reference/cli/organizations_roles_edit.md b/docs/reference/cli/organizations_roles_edit.md
index 04fc8522a21ef..988f8c0eee1b2 100644
--- a/docs/reference/cli/organizations_roles_edit.md
+++ b/docs/reference/cli/organizations_roles_edit.md
@@ -1,5 +1,4 @@
-
# organizations roles edit
Edit an organization custom role
@@ -23,7 +22,7 @@ coder organizations roles edit [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -31,7 +30,7 @@ Bypass prompts.
### --dry-run
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Does all the work, but does not submit the final updated role.
@@ -39,7 +38,7 @@ Does all the work, but does not submit the final updated role.
### --stdin
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Reads stdin for the json role definition to upload.
@@ -47,7 +46,7 @@ Reads stdin for the json role definition to upload.
### -c, --column
| | |
-| ------- | ---------------------------------------------------------------------------------------------------------------- |
+|---------|------------------------------------------------------------------------------------------------------------------|
| Type | [name\|display name\|organization id\|site permissions\|organization permissions\|user permissions] |
| Default | name,display name,site permissions,organization permissions,user permissions |
@@ -56,7 +55,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/organizations_roles_show.md b/docs/reference/cli/organizations_roles_show.md
index 2d75ae74d4576..1d5653839e756 100644
--- a/docs/reference/cli/organizations_roles_show.md
+++ b/docs/reference/cli/organizations_roles_show.md
@@ -1,5 +1,4 @@
-
# organizations roles show
Show role(s)
@@ -15,7 +14,7 @@ coder organizations roles show [flags] [role_names ...]
### -c, --column
| | |
-| ------- | ---------------------------------------------------------------------------------------------------------------- |
+|---------|------------------------------------------------------------------------------------------------------------------|
| Type | [name\|display name\|organization id\|site permissions\|organization permissions\|user permissions] |
| Default | name,display name,site permissions,organization permissions,user permissions |
@@ -24,7 +23,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/organizations_settings.md b/docs/reference/cli/organizations_settings.md
index 15093c984fedc..76a84135edb07 100644
--- a/docs/reference/cli/organizations_settings.md
+++ b/docs/reference/cli/organizations_settings.md
@@ -1,12 +1,11 @@
-
# organizations settings
Manage organization settings.
Aliases:
-- setting
+* setting
## Usage
@@ -17,6 +16,6 @@ coder organizations settings
## Subcommands
| Name | Purpose |
-| ----------------------------------------------------- | --------------------------------------- |
+|-------------------------------------------------------|-----------------------------------------|
| [show](./organizations_settings_show.md) | Outputs specified organization setting. |
| [set](./organizations_settings_set.md) | Update specified organization setting. |
diff --git a/docs/reference/cli/organizations_settings_set.md b/docs/reference/cli/organizations_settings_set.md
index e1e9bf0261a1b..c7d0fd8f138e3 100644
--- a/docs/reference/cli/organizations_settings_set.md
+++ b/docs/reference/cli/organizations_settings_set.md
@@ -1,5 +1,4 @@
-
# organizations settings set
Update specified organization setting.
@@ -21,7 +20,7 @@ coder organizations settings set
## Subcommands
| Name | Purpose |
-| ----------------------------------------------------------------------------------- | ------------------------------------------------------------------------ |
+|-------------------------------------------------------------------------------------|--------------------------------------------------------------------------|
| [group-sync](./organizations_settings_set_group-sync.md) | Group sync settings to sync groups from an IdP. |
| [role-sync](./organizations_settings_set_role-sync.md) | Role sync settings to sync organization roles from an IdP. |
| [organization-sync](./organizations_settings_set_organization-sync.md) | Organization sync settings to sync organization memberships from an IdP. |
diff --git a/docs/reference/cli/organizations_settings_set_group-sync.md b/docs/reference/cli/organizations_settings_set_group-sync.md
index f60a456771763..ceefa22a523c2 100644
--- a/docs/reference/cli/organizations_settings_set_group-sync.md
+++ b/docs/reference/cli/organizations_settings_set_group-sync.md
@@ -1,12 +1,11 @@
-
# organizations settings set group-sync
Group sync settings to sync groups from an IdP.
Aliases:
-- groupsync
+* groupsync
## Usage
diff --git a/docs/reference/cli/organizations_settings_set_organization-sync.md b/docs/reference/cli/organizations_settings_set_organization-sync.md
index 6b6557e2c3358..8580c6cef3767 100644
--- a/docs/reference/cli/organizations_settings_set_organization-sync.md
+++ b/docs/reference/cli/organizations_settings_set_organization-sync.md
@@ -1,14 +1,13 @@
-
# organizations settings set organization-sync
Organization sync settings to sync organization memberships from an IdP.
Aliases:
-- organizationsync
-- org-sync
-- orgsync
+* organizationsync
+* org-sync
+* orgsync
## Usage
diff --git a/docs/reference/cli/organizations_settings_set_role-sync.md b/docs/reference/cli/organizations_settings_set_role-sync.md
index 40203b21f752e..01d46319f54a9 100644
--- a/docs/reference/cli/organizations_settings_set_role-sync.md
+++ b/docs/reference/cli/organizations_settings_set_role-sync.md
@@ -1,12 +1,11 @@
-
# organizations settings set role-sync
Role sync settings to sync organization roles from an IdP.
Aliases:
-- rolesync
+* rolesync
## Usage
diff --git a/docs/reference/cli/organizations_settings_show.md b/docs/reference/cli/organizations_settings_show.md
index feaef7d0124f9..90dc642745707 100644
--- a/docs/reference/cli/organizations_settings_show.md
+++ b/docs/reference/cli/organizations_settings_show.md
@@ -1,5 +1,4 @@
-
# organizations settings show
Outputs specified organization setting.
@@ -21,7 +20,7 @@ coder organizations settings show
## Subcommands
| Name | Purpose |
-| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------ |
+|--------------------------------------------------------------------------------------|--------------------------------------------------------------------------|
| [group-sync](./organizations_settings_show_group-sync.md) | Group sync settings to sync groups from an IdP. |
| [role-sync](./organizations_settings_show_role-sync.md) | Role sync settings to sync organization roles from an IdP. |
| [organization-sync](./organizations_settings_show_organization-sync.md) | Organization sync settings to sync organization memberships from an IdP. |
diff --git a/docs/reference/cli/organizations_settings_show_group-sync.md b/docs/reference/cli/organizations_settings_show_group-sync.md
index 6ae796d117e61..75a4398f88bce 100644
--- a/docs/reference/cli/organizations_settings_show_group-sync.md
+++ b/docs/reference/cli/organizations_settings_show_group-sync.md
@@ -1,12 +1,11 @@
-
# organizations settings show group-sync
Group sync settings to sync groups from an IdP.
Aliases:
-- groupsync
+* groupsync
## Usage
diff --git a/docs/reference/cli/organizations_settings_show_organization-sync.md b/docs/reference/cli/organizations_settings_show_organization-sync.md
index 7e2e025c2a4af..2054aa29b4cdb 100644
--- a/docs/reference/cli/organizations_settings_show_organization-sync.md
+++ b/docs/reference/cli/organizations_settings_show_organization-sync.md
@@ -1,14 +1,13 @@
-
# organizations settings show organization-sync
Organization sync settings to sync organization memberships from an IdP.
Aliases:
-- organizationsync
-- org-sync
-- orgsync
+* organizationsync
+* org-sync
+* orgsync
## Usage
diff --git a/docs/reference/cli/organizations_settings_show_role-sync.md b/docs/reference/cli/organizations_settings_show_role-sync.md
index 8a32c138517d1..6fe2fd40a951c 100644
--- a/docs/reference/cli/organizations_settings_show_role-sync.md
+++ b/docs/reference/cli/organizations_settings_show_role-sync.md
@@ -1,12 +1,11 @@
-
# organizations settings show role-sync
Role sync settings to sync organization roles from an IdP.
Aliases:
-- rolesync
+* rolesync
## Usage
diff --git a/docs/reference/cli/organizations_show.md b/docs/reference/cli/organizations_show.md
index 0cd111e9da0eb..540014b46802d 100644
--- a/docs/reference/cli/organizations_show.md
+++ b/docs/reference/cli/organizations_show.md
@@ -1,5 +1,4 @@
-
# organizations show
Show the organization. Using "selected" will show the selected organization from the "--org" flag. Using "me" will show all organizations you are a member of.
@@ -35,7 +34,7 @@ coder organizations show [flags] ["selected"|"me"|uuid|org_name]
### --only-id
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Only print the organization ID.
@@ -43,7 +42,7 @@ Only print the organization ID.
### -c, --column
| | |
-| ------- | ----------------------------------------------------------------------------------------- |
+|---------|-------------------------------------------------------------------------------------------|
| Type | [id\|name\|display name\|icon\|description\|created at\|updated at\|default] |
| Default | id,name,default |
@@ -52,7 +51,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------------ |
+|---------|--------------------------------|
| Type | text\|table\|json |
| Default | text |
diff --git a/docs/reference/cli/ping.md b/docs/reference/cli/ping.md
index c8d63addcf8d7..8fbc1eaf36e8e 100644
--- a/docs/reference/cli/ping.md
+++ b/docs/reference/cli/ping.md
@@ -1,5 +1,4 @@
-
# ping
Ping a workspace
@@ -15,7 +14,7 @@ coder ping [flags]
### --wait
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 1s |
@@ -24,7 +23,7 @@ Specifies how long to wait between pings.
### -t, --timeout
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 5s |
@@ -33,7 +32,7 @@ Specifies how long to wait for a ping to complete.
### -n, --num
| | |
-| ---- | ---------------- |
+|------|------------------|
| Type | int |
Specifies the number of pings to perform. By default, pings will continue until interrupted.
diff --git a/docs/reference/cli/port-forward.md b/docs/reference/cli/port-forward.md
index f279e2125d93b..976b830fca360 100644
--- a/docs/reference/cli/port-forward.md
+++ b/docs/reference/cli/port-forward.md
@@ -1,12 +1,11 @@
-
# port-forward
Forward ports from a workspace to the local machine. For reverse port forwarding, use "coder ssh -R".
Aliases:
-- tunnel
+* tunnel
## Usage
@@ -45,7 +44,7 @@ machine:
### -p, --tcp
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string-array |
| Environment | $CODER_PORT_FORWARD_TCP |
@@ -54,7 +53,7 @@ Forward TCP port(s) from the workspace to the local machine.
### --udp
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string-array |
| Environment | $CODER_PORT_FORWARD_UDP |
@@ -63,7 +62,7 @@ Forward UDP port(s) from the workspace to the local machine. The UDP connection
### --disable-autostart
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | bool |
| Environment | $CODER_SSH_DISABLE_AUTOSTART |
| Default | false |
diff --git a/docs/reference/cli/provisioner.md b/docs/reference/cli/provisioner.md
index 54cc28a84bea4..08f4918ec1cf0 100644
--- a/docs/reference/cli/provisioner.md
+++ b/docs/reference/cli/provisioner.md
@@ -1,12 +1,11 @@
-
# provisioner
Manage provisioner daemons
Aliases:
-- provisioners
+* provisioners
## Usage
@@ -17,6 +16,6 @@ coder provisioner
## Subcommands
| Name | Purpose |
-| -------------------------------------------- | ------------------------ |
+|----------------------------------------------|--------------------------|
| [start](./provisioner_start.md) | Run a provisioner daemon |
| [keys](./provisioner_keys.md) | Manage provisioner keys |
diff --git a/docs/reference/cli/provisioner_keys.md b/docs/reference/cli/provisioner_keys.md
index 014af6f117c3a..80cfd8f0a31b8 100644
--- a/docs/reference/cli/provisioner_keys.md
+++ b/docs/reference/cli/provisioner_keys.md
@@ -1,12 +1,11 @@
-
# provisioner keys
Manage provisioner keys
Aliases:
-- key
+* key
## Usage
@@ -17,7 +16,7 @@ coder provisioner keys
## Subcommands
| Name | Purpose |
-| --------------------------------------------------- | ---------------------------------------- |
+|-----------------------------------------------------|------------------------------------------|
| [create](./provisioner_keys_create.md) | Create a new provisioner key |
| [list](./provisioner_keys_list.md) | List provisioner keys in an organization |
| [delete](./provisioner_keys_delete.md) | Delete a provisioner key |
diff --git a/docs/reference/cli/provisioner_keys_create.md b/docs/reference/cli/provisioner_keys_create.md
index da6479d15bfc9..737ba187c9c27 100644
--- a/docs/reference/cli/provisioner_keys_create.md
+++ b/docs/reference/cli/provisioner_keys_create.md
@@ -1,5 +1,4 @@
-
# provisioner keys create
Create a new provisioner key
@@ -15,7 +14,7 @@ coder provisioner keys create [flags]
### -t, --tag
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string-array |
| Environment | $CODER_PROVISIONERD_TAGS |
@@ -24,7 +23,7 @@ Tags to filter provisioner jobs by.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/provisioner_keys_delete.md b/docs/reference/cli/provisioner_keys_delete.md
index 56e32e57d048b..4303491106716 100644
--- a/docs/reference/cli/provisioner_keys_delete.md
+++ b/docs/reference/cli/provisioner_keys_delete.md
@@ -1,12 +1,11 @@
-
# provisioner keys delete
Delete a provisioner key
Aliases:
-- rm
+* rm
## Usage
@@ -19,7 +18,7 @@ coder provisioner keys delete [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -27,7 +26,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/provisioner_keys_list.md b/docs/reference/cli/provisioner_keys_list.md
index 366db05fa490f..61e00dde759a9 100644
--- a/docs/reference/cli/provisioner_keys_list.md
+++ b/docs/reference/cli/provisioner_keys_list.md
@@ -1,12 +1,11 @@
-
# provisioner keys list
List provisioner keys in an organization
Aliases:
-- ls
+* ls
## Usage
@@ -19,7 +18,7 @@ coder provisioner keys list [flags]
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/provisioner_start.md b/docs/reference/cli/provisioner_start.md
index 65254d18c0149..2a3c88ff93139 100644
--- a/docs/reference/cli/provisioner_start.md
+++ b/docs/reference/cli/provisioner_start.md
@@ -1,5 +1,4 @@
-
# provisioner start
Run a provisioner daemon
@@ -15,7 +14,7 @@ coder provisioner start [flags]
### -c, --cache-dir
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_CACHE_DIRECTORY |
| Default | ~/.cache/coder |
@@ -25,7 +24,7 @@ Directory to store cached data.
### -t, --tag
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string-array |
| Environment | $CODER_PROVISIONERD_TAGS |
@@ -34,7 +33,7 @@ Tags to filter provisioner jobs by.
### --poll-interval
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | duration |
| Environment | $CODER_PROVISIONERD_POLL_INTERVAL |
| Default | 1s |
@@ -44,7 +43,7 @@ Deprecated and ignored.
### --poll-jitter
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | duration |
| Environment | $CODER_PROVISIONERD_POLL_JITTER |
| Default | 100ms |
@@ -54,7 +53,7 @@ Deprecated and ignored.
### --psk
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_PSK |
@@ -63,7 +62,7 @@ Pre-shared key to authenticate with Coder server.
### --key
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_KEY |
@@ -72,7 +71,7 @@ Provisioner key to authenticate with Coder server.
### --name
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_NAME |
@@ -81,7 +80,7 @@ Name of this provisioner daemon. Defaults to the current hostname without FQDN.
### --verbose
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROVISIONER_DAEMON_VERBOSE |
| Default | false |
@@ -91,7 +90,7 @@ Output debug-level logs.
### --log-human
| | |
-| ----------- | ---------------------------------------------------- |
+|-------------|------------------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_LOGGING_HUMAN |
| Default | /dev/stderr |
@@ -101,7 +100,7 @@ Output human-readable logs to a given file.
### --log-json
| | |
-| ----------- | --------------------------------------------------- |
+|-------------|-----------------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_LOGGING_JSON |
@@ -110,7 +109,7 @@ Output JSON logs to a given file.
### --log-stackdriver
| | |
-| ----------- | ---------------------------------------------------------- |
+|-------------|------------------------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_LOGGING_STACKDRIVER |
@@ -119,16 +118,16 @@ Output Stackdriver compatible logs to a given file.
### --log-filter
| | |
-| ----------- | ------------------------------------------------- |
+|-------------|---------------------------------------------------|
| Type | string-array |
| Environment | $CODER_PROVISIONER_DAEMON_LOG_FILTER |
-Filter debug logs by matching against a given regex. Use .\* to match all debug logs.
+Filter debug logs by matching against a given regex. Use .* to match all debug logs.
### --prometheus-enable
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | bool |
| Environment | $CODER_PROMETHEUS_ENABLE |
| Default | false |
@@ -138,7 +137,7 @@ Serve prometheus metrics on the address defined by prometheus address.
### --prometheus-address
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_PROMETHEUS_ADDRESS |
| Default | 127.0.0.1:2112 |
@@ -148,7 +147,7 @@ The bind address to serve prometheus metrics.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/publickey.md b/docs/reference/cli/publickey.md
index 63e19e7e54423..ec68d813b137b 100644
--- a/docs/reference/cli/publickey.md
+++ b/docs/reference/cli/publickey.md
@@ -1,12 +1,11 @@
-
# publickey
Output your Coder public key used for Git operations
Aliases:
-- pubkey
+* pubkey
## Usage
@@ -19,7 +18,7 @@ coder publickey [flags]
### --reset
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Regenerate your public key. This will require updating the key on any services it's registered with.
@@ -27,7 +26,7 @@ Regenerate your public key. This will require updating the key on any services i
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/rename.md b/docs/reference/cli/rename.md
index 5cb9242beba38..511ccc60f8d3b 100644
--- a/docs/reference/cli/rename.md
+++ b/docs/reference/cli/rename.md
@@ -1,5 +1,4 @@
-
# rename
Rename a workspace
@@ -15,7 +14,7 @@ coder rename [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/reset-password.md b/docs/reference/cli/reset-password.md
index 2d63226f02d26..ada9ad7e7db3e 100644
--- a/docs/reference/cli/reset-password.md
+++ b/docs/reference/cli/reset-password.md
@@ -1,5 +1,4 @@
-
# reset-password
Directly connect to the database to reset a user's password
@@ -15,8 +14,18 @@ coder reset-password [flags]
### --postgres-url
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_PG_CONNECTION_URL |
URL of a PostgreSQL database to connect to.
+
+### --postgres-connection-auth
+
+| | |
+|-------------|----------------------------------------|
+| Type | password\|awsiamrds |
+| Environment | $CODER_PG_CONNECTION_AUTH |
+| Default | password |
+
+Type of auth to use when connecting to postgres.
diff --git a/docs/reference/cli/restart.md b/docs/reference/cli/restart.md
index 3b06efb6e4855..1c30e3e1fffaa 100644
--- a/docs/reference/cli/restart.md
+++ b/docs/reference/cli/restart.md
@@ -1,5 +1,4 @@
-
# restart
Restart a workspace
@@ -15,7 +14,7 @@ coder restart [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -23,7 +22,7 @@ Bypass prompts.
### --build-option
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string-array |
| Environment | $CODER_BUILD_OPTION |
@@ -32,7 +31,7 @@ Build option value in the format "name=value".
### --build-options
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Prompt for one-time build options defined with ephemeral parameters.
@@ -40,7 +39,7 @@ Prompt for one-time build options defined with ephemeral parameters.
### --ephemeral-parameter
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string-array |
| Environment | $CODER_EPHEMERAL_PARAMETER |
@@ -49,7 +48,7 @@ Set the value of ephemeral parameters defined in the template. The format is "na
### --prompt-ephemeral-parameters
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMPT_EPHEMERAL_PARAMETERS |
@@ -58,7 +57,7 @@ Prompt to set values of ephemeral parameters defined in the template. If a value
### --parameter
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER |
@@ -67,7 +66,7 @@ Rich parameter value in the format "name=value".
### --rich-parameter-file
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_RICH_PARAMETER_FILE |
@@ -76,7 +75,7 @@ Specify a file path with values for rich parameters defined in the template. The
### --parameter-default
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER_DEFAULT |
@@ -85,7 +84,7 @@ Rich parameter default values in the format "name=value".
### --always-prompt
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Always prompt all parameters. Does not pull parameter values from existing workspace.
diff --git a/docs/reference/cli/schedule.md b/docs/reference/cli/schedule.md
index 4319f80438af3..c25bd4bf60036 100644
--- a/docs/reference/cli/schedule.md
+++ b/docs/reference/cli/schedule.md
@@ -1,5 +1,4 @@
-
# schedule
Schedule automated start and stop times for workspaces
@@ -13,7 +12,7 @@ coder schedule { show | start | stop | extend }
## Subcommands
| Name | Purpose |
-| ------------------------------------------- | --------------------------------------------------------------- |
+|---------------------------------------------|-----------------------------------------------------------------|
| [show](./schedule_show.md) | Show workspace schedules |
| [start](./schedule_start.md) | Edit workspace start schedule |
| [stop](./schedule_stop.md) | Edit workspace stop schedule |
diff --git a/docs/reference/cli/schedule_extend.md b/docs/reference/cli/schedule_extend.md
index 06ba46d358da7..e4b696ad5c4a7 100644
--- a/docs/reference/cli/schedule_extend.md
+++ b/docs/reference/cli/schedule_extend.md
@@ -1,12 +1,11 @@
-
# schedule extend
Extend the stop time of a currently running workspace instance.
Aliases:
-- override-stop
+* override-stop
## Usage
diff --git a/docs/reference/cli/schedule_show.md b/docs/reference/cli/schedule_show.md
index a9f848a242fda..65d858c1fbe38 100644
--- a/docs/reference/cli/schedule_show.md
+++ b/docs/reference/cli/schedule_show.md
@@ -1,5 +1,4 @@
-
# schedule show
Show workspace schedules
@@ -26,7 +25,7 @@ Shows the following information for the given workspace(s):
### -a, --all
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Specifies whether all workspaces will be listed or not.
@@ -34,7 +33,7 @@ Specifies whether all workspaces will be listed or not.
### --search
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | string |
| Default | owner:me |
@@ -43,7 +42,7 @@ Search for a workspace with a query.
### -c, --column
| | |
-| ------- | ------------------------------------------------------------------------- |
+|---------|---------------------------------------------------------------------------|
| Type | [workspace\|starts at\|starts next\|stops after\|stops next] |
| Default | workspace,starts at,starts next,stops after,stops next |
@@ -52,7 +51,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/schedule_start.md b/docs/reference/cli/schedule_start.md
index 771bb995e65b0..886e5edf1adaf 100644
--- a/docs/reference/cli/schedule_start.md
+++ b/docs/reference/cli/schedule_start.md
@@ -1,5 +1,4 @@
-
# schedule start
Edit workspace start schedule
diff --git a/docs/reference/cli/schedule_stop.md b/docs/reference/cli/schedule_stop.md
index 399bc69cd5fc9..a832c9c919573 100644
--- a/docs/reference/cli/schedule_stop.md
+++ b/docs/reference/cli/schedule_stop.md
@@ -1,5 +1,4 @@
-
# schedule stop
Edit workspace stop schedule
diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md
index aa9b0cc0a3196..98cb2a90c20da 100644
--- a/docs/reference/cli/server.md
+++ b/docs/reference/cli/server.md
@@ -1,5 +1,4 @@
-
# server
Start a Coder server
@@ -13,7 +12,7 @@ coder server [flags]
## Subcommands
| Name | Purpose |
-| ------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------ |
+|---------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------|
| [create-admin-user](./server_create-admin-user.md) | Create a new admin user with the given username, email and password and adds it to every organization. |
| [postgres-builtin-url](./server_postgres-builtin-url.md) | Output the connection URL for the built-in PostgreSQL deployment. |
| [postgres-builtin-serve](./server_postgres-builtin-serve.md) | Run the built-in PostgreSQL deployment. |
@@ -24,7 +23,7 @@ coder server [flags]
### --access-url
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | url |
| Environment | $CODER_ACCESS_URL |
| YAML | networking.accessURL |
@@ -34,17 +33,17 @@ The URL that users will use to access the Coder deployment.
### --wildcard-access-url
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_WILDCARD_ACCESS_URL |
| YAML | networking.wildcardAccessURL |
-Specifies the wildcard hostname to use for workspace applications in the form "\*.example.com".
+Specifies the wildcard hostname to use for workspace applications in the form "*.example.com".
### --docs-url
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | url |
| Environment | $CODER_DOCS_URL |
| YAML | networking.docsURL |
@@ -55,7 +54,7 @@ Specifies the custom docs URL.
### --redirect-to-access-url
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | bool |
| Environment | $CODER_REDIRECT_TO_ACCESS_URL |
| YAML | networking.redirectToAccessURL |
@@ -65,7 +64,7 @@ Specifies whether to redirect requests that do not match the access URL host.
### --http-address
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_HTTP_ADDRESS |
| YAML | networking.http.httpAddress |
@@ -76,7 +75,7 @@ HTTP bind address of the server. Unset to disable the HTTP endpoint.
### --tls-address
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | host:port |
| Environment | $CODER_TLS_ADDRESS |
| YAML | networking.tls.address |
@@ -87,7 +86,7 @@ HTTPS bind address of the server.
### --tls-enable
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | bool |
| Environment | $CODER_TLS_ENABLE |
| YAML | networking.tls.enable |
@@ -97,7 +96,7 @@ Whether TLS will be enabled.
### --tls-cert-file
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string-array |
| Environment | $CODER_TLS_CERT_FILE |
| YAML | networking.tls.certFiles |
@@ -107,7 +106,7 @@ Path to each certificate for TLS. It requires a PEM-encoded file. To configure t
### --tls-client-ca-file
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_TLS_CLIENT_CA_FILE |
| YAML | networking.tls.clientCAFile |
@@ -117,7 +116,7 @@ PEM-encoded Certificate Authority file used for checking the authenticity of cli
### --tls-client-auth
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_TLS_CLIENT_AUTH |
| YAML | networking.tls.clientAuth |
@@ -128,7 +127,7 @@ Policy the server will follow for TLS Client Authentication. Accepted values are
### --tls-key-file
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string-array |
| Environment | $CODER_TLS_KEY_FILE |
| YAML | networking.tls.keyFiles |
@@ -138,7 +137,7 @@ Paths to the private keys for each of the certificates. It requires a PEM-encode
### --tls-min-version
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_TLS_MIN_VERSION |
| YAML | networking.tls.minVersion |
@@ -149,7 +148,7 @@ Minimum supported version of TLS. Accepted values are "tls10", "tls11", "tls12"
### --tls-client-cert-file
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string |
| Environment | $CODER_TLS_CLIENT_CERT_FILE |
| YAML | networking.tls.clientCertFile |
@@ -159,7 +158,7 @@ Path to certificate for client TLS authentication. It requires a PEM-encoded fil
### --tls-client-key-file
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_TLS_CLIENT_KEY_FILE |
| YAML | networking.tls.clientKeyFile |
@@ -169,7 +168,7 @@ Path to key for client TLS authentication. It requires a PEM-encoded file.
### --tls-ciphers
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string-array |
| Environment | $CODER_TLS_CIPHERS |
| YAML | networking.tls.tlsCiphers |
@@ -179,7 +178,7 @@ Specify specific TLS ciphers that allowed to be used. See https://github.com/gol
### --tls-allow-insecure-ciphers
| | |
-| ----------- | --------------------------------------------------- |
+|-------------|-----------------------------------------------------|
| Type | bool |
| Environment | $CODER_TLS_ALLOW_INSECURE_CIPHERS |
| YAML | networking.tls.tlsAllowInsecureCiphers |
@@ -190,7 +189,7 @@ By default, only ciphers marked as 'secure' are allowed to be used. See https://
### --derp-server-enable
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | bool |
| Environment | $CODER_DERP_SERVER_ENABLE |
| YAML | networking.derp.enable |
@@ -201,7 +200,7 @@ Whether to enable or disable the embedded DERP relay server.
### --derp-server-region-name
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string |
| Environment | $CODER_DERP_SERVER_REGION_NAME |
| YAML | networking.derp.regionName |
@@ -212,7 +211,7 @@ Region name that for the embedded DERP server.
### --derp-server-stun-addresses
| | |
-| ----------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
+|-------------|------------------------------------------------------------------------------------------------------------------------------------------|
| Type | string-array |
| Environment | $CODER_DERP_SERVER_STUN_ADDRESSES |
| YAML | networking.derp.stunAddresses |
@@ -223,7 +222,7 @@ Addresses for STUN servers to establish P2P connections. It's recommended to hav
### --derp-server-relay-url
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | url |
| Environment | $CODER_DERP_SERVER_RELAY_URL |
| YAML | networking.derp.relayURL |
@@ -233,7 +232,7 @@ An HTTP URL that is accessible by other replicas to relay DERP traffic. Required
### --block-direct-connections
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | bool |
| Environment | $CODER_BLOCK_DIRECT |
| YAML | networking.derp.blockDirect |
@@ -243,7 +242,7 @@ Block peer-to-peer (aka. direct) workspace connections. All workspace connection
### --derp-force-websockets
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | bool |
| Environment | $CODER_DERP_FORCE_WEBSOCKETS |
| YAML | networking.derp.forceWebSockets |
@@ -253,7 +252,7 @@ Force clients and agents to always use WebSocket to connect to DERP relay server
### --derp-config-url
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_DERP_CONFIG_URL |
| YAML | networking.derp.url |
@@ -263,7 +262,7 @@ URL to fetch a DERP mapping on startup. See: https://tailscale.com/kb/1118/custo
### --derp-config-path
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_DERP_CONFIG_PATH |
| YAML | networking.derp.configPath |
@@ -273,7 +272,7 @@ Path to read a DERP mapping from. See: https://tailscale.com/kb/1118/custom-derp
### --prometheus-enable
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMETHEUS_ENABLE |
| YAML | introspection.prometheus.enable |
@@ -283,7 +282,7 @@ Serve prometheus metrics on the address defined by prometheus address.
### --prometheus-address
| | |
-| ----------- | --------------------------------------------- |
+|-------------|-----------------------------------------------|
| Type | host:port |
| Environment | $CODER_PROMETHEUS_ADDRESS |
| YAML | introspection.prometheus.address |
@@ -294,7 +293,7 @@ The bind address to serve prometheus metrics.
### --prometheus-collect-agent-stats
| | |
-| ----------- | --------------------------------------------------------- |
+|-------------|-----------------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMETHEUS_COLLECT_AGENT_STATS |
| YAML | introspection.prometheus.collect_agent_stats |
@@ -304,7 +303,7 @@ Collect agent stats (may increase charges for metrics storage).
### --prometheus-aggregate-agent-stats-by
| | |
-| ----------- | -------------------------------------------------------------- |
+|-------------|----------------------------------------------------------------|
| Type | string-array |
| Environment | $CODER_PROMETHEUS_AGGREGATE_AGENT_STATS_BY |
| YAML | introspection.prometheus.aggregate_agent_stats_by |
@@ -315,7 +314,7 @@ When collecting agent stats, aggregate metrics by a given set of comma-separated
### --prometheus-collect-db-metrics
| | |
-| ----------- | -------------------------------------------------------- |
+|-------------|----------------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMETHEUS_COLLECT_DB_METRICS |
| YAML | introspection.prometheus.collect_db_metrics |
@@ -326,7 +325,7 @@ Collect database query metrics (may increase charges for metrics storage). If se
### --pprof-enable
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | bool |
| Environment | $CODER_PPROF_ENABLE |
| YAML | introspection.pprof.enable |
@@ -336,7 +335,7 @@ Serve pprof metrics on the address defined by pprof address.
### --pprof-address
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | host:port |
| Environment | $CODER_PPROF_ADDRESS |
| YAML | introspection.pprof.address |
@@ -347,7 +346,7 @@ The bind address to serve pprof.
### --oauth2-github-client-id
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string |
| Environment | $CODER_OAUTH2_GITHUB_CLIENT_ID |
| YAML | oauth2.github.clientID |
@@ -357,7 +356,7 @@ Client ID for Login with GitHub.
### --oauth2-github-client-secret
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | string |
| Environment | $CODER_OAUTH2_GITHUB_CLIENT_SECRET |
@@ -366,7 +365,7 @@ Client secret for Login with GitHub.
### --oauth2-github-allowed-orgs
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | string-array |
| Environment | $CODER_OAUTH2_GITHUB_ALLOWED_ORGS |
| YAML | oauth2.github.allowedOrgs |
@@ -376,7 +375,7 @@ Organizations the user must be a member of to Login with GitHub.
### --oauth2-github-allowed-teams
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | string-array |
| Environment | $CODER_OAUTH2_GITHUB_ALLOWED_TEAMS |
| YAML | oauth2.github.allowedTeams |
@@ -386,7 +385,7 @@ Teams inside organizations the user must be a member of to Login with GitHub. St
### --oauth2-github-allow-signups
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | bool |
| Environment | $CODER_OAUTH2_GITHUB_ALLOW_SIGNUPS |
| YAML | oauth2.github.allowSignups |
@@ -396,7 +395,7 @@ Whether new users can sign up with GitHub.
### --oauth2-github-allow-everyone
| | |
-| ----------- | ------------------------------------------------ |
+|-------------|--------------------------------------------------|
| Type | bool |
| Environment | $CODER_OAUTH2_GITHUB_ALLOW_EVERYONE |
| YAML | oauth2.github.allowEveryone |
@@ -406,7 +405,7 @@ Allow all logins, setting this option means allowed orgs and teams must be empty
### --oauth2-github-enterprise-base-url
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | string |
| Environment | $CODER_OAUTH2_GITHUB_ENTERPRISE_BASE_URL |
| YAML | oauth2.github.enterpriseBaseURL |
@@ -416,7 +415,7 @@ Base URL of a GitHub Enterprise deployment to use for Login with GitHub.
### --oidc-allow-signups
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | bool |
| Environment | $CODER_OIDC_ALLOW_SIGNUPS |
| YAML | oidc.allowSignups |
@@ -427,7 +426,7 @@ Whether new users can sign up with OIDC.
### --oidc-client-id
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_CLIENT_ID |
| YAML | oidc.clientID |
@@ -437,7 +436,7 @@ Client ID to use for Login with OIDC.
### --oidc-client-secret
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_CLIENT_SECRET |
@@ -446,7 +445,7 @@ Client secret to use for Login with OIDC.
### --oidc-client-key-file
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_CLIENT_KEY_FILE |
| YAML | oidc.oidcClientKeyFile |
@@ -456,7 +455,7 @@ Pem encoded RSA private key to use for oauth2 PKI/JWT authorization. This can be
### --oidc-client-cert-file
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_CLIENT_CERT_FILE |
| YAML | oidc.oidcClientCertFile |
@@ -466,7 +465,7 @@ Pem encoded certificate file to use for oauth2 PKI/JWT authorization. The public
### --oidc-email-domain
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string-array |
| Environment | $CODER_OIDC_EMAIL_DOMAIN |
| YAML | oidc.emailDomain |
@@ -476,7 +475,7 @@ Email domains that clients logging in with OIDC must match.
### --oidc-issuer-url
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_ISSUER_URL |
| YAML | oidc.issuerURL |
@@ -486,7 +485,7 @@ Issuer URL to use for Login with OIDC.
### --oidc-scopes
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | string-array |
| Environment | $CODER_OIDC_SCOPES |
| YAML | oidc.scopes |
@@ -497,7 +496,7 @@ Scopes to grant when authenticating with OIDC.
### --oidc-ignore-email-verified
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_OIDC_IGNORE_EMAIL_VERIFIED |
| YAML | oidc.ignoreEmailVerified |
@@ -507,7 +506,7 @@ Ignore the email_verified claim from the upstream provider.
### --oidc-username-field
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_USERNAME_FIELD |
| YAML | oidc.usernameField |
@@ -518,7 +517,7 @@ OIDC claim field to use as the username.
### --oidc-name-field
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_NAME_FIELD |
| YAML | oidc.nameField |
@@ -529,7 +528,7 @@ OIDC claim field to use as the name.
### --oidc-email-field
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_EMAIL_FIELD |
| YAML | oidc.emailField |
@@ -540,7 +539,7 @@ OIDC claim field to use as the email.
### --oidc-auth-url-params
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | struct[map[string]string] |
| Environment | $CODER_OIDC_AUTH_URL_PARAMS |
| YAML | oidc.authURLParams |
@@ -551,7 +550,7 @@ OIDC auth URL parameters to pass to the upstream provider.
### --oidc-ignore-userinfo
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | bool |
| Environment | $CODER_OIDC_IGNORE_USERINFO |
| YAML | oidc.ignoreUserInfo |
@@ -562,7 +561,7 @@ Ignore the userinfo endpoint and only use the ID token for user information.
### --oidc-group-field
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_GROUP_FIELD |
| YAML | oidc.groupField |
@@ -572,7 +571,7 @@ This field must be set if using the group sync feature and the scope name is not
### --oidc-group-mapping
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | struct[map[string]string] |
| Environment | $CODER_OIDC_GROUP_MAPPING |
| YAML | oidc.groupMapping |
@@ -583,7 +582,7 @@ A map of OIDC group IDs and the group in Coder it should map to. This is useful
### --oidc-group-auto-create
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | bool |
| Environment | $CODER_OIDC_GROUP_AUTO_CREATE |
| YAML | oidc.enableGroupAutoCreate |
@@ -594,18 +593,18 @@ Automatically creates missing groups from a user's groups claim.
### --oidc-group-regex-filter
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | regexp |
| Environment | $CODER_OIDC_GROUP_REGEX_FILTER |
| YAML | oidc.groupRegexFilter |
-| Default | .\* |
+| Default | .* |
If provided any group name not matching the regex is ignored. This allows for filtering out groups that are not needed. This filter is applied after the group mapping.
### --oidc-allowed-groups
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string-array |
| Environment | $CODER_OIDC_ALLOWED_GROUPS |
| YAML | oidc.groupAllowed |
@@ -615,7 +614,7 @@ If provided any group name not in the list will not be allowed to authenticate.
### --oidc-user-role-field
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_USER_ROLE_FIELD |
| YAML | oidc.userRoleField |
@@ -625,7 +624,7 @@ This field must be set if using the user roles sync feature. Set this to the nam
### --oidc-user-role-mapping
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | struct[map[string][]string] |
| Environment | $CODER_OIDC_USER_ROLE_MAPPING |
| YAML | oidc.userRoleMapping |
@@ -636,7 +635,7 @@ A map of the OIDC passed in user roles and the groups in Coder it should map to.
### --oidc-user-role-default
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string-array |
| Environment | $CODER_OIDC_USER_ROLE_DEFAULT |
| YAML | oidc.userRoleDefault |
@@ -646,7 +645,7 @@ If user role sync is enabled, these roles are always included for all authentica
### --oidc-sign-in-text
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_SIGN_IN_TEXT |
| YAML | oidc.signInText |
@@ -657,7 +656,7 @@ The text to show on the OpenID Connect sign in button.
### --oidc-icon-url
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | url |
| Environment | $CODER_OIDC_ICON_URL |
| YAML | oidc.iconURL |
@@ -667,7 +666,7 @@ URL pointing to the icon to use on the OpenID Connect login button.
### --oidc-signups-disabled-text
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | string |
| Environment | $CODER_OIDC_SIGNUPS_DISABLED_TEXT |
| YAML | oidc.signupsDisabledText |
@@ -677,7 +676,7 @@ The custom text to show on the error page informing about disabled OIDC signups.
### --dangerous-oidc-skip-issuer-checks
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | bool |
| Environment | $CODER_DANGEROUS_OIDC_SKIP_ISSUER_CHECKS |
| YAML | oidc.dangerousSkipIssuerChecks |
@@ -687,7 +686,7 @@ OIDC issuer urls must match in the request, the id_token 'iss' claim, and in the
### --telemetry
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | bool |
| Environment | $CODER_TELEMETRY_ENABLE |
| YAML | telemetry.enable |
@@ -698,7 +697,7 @@ Whether telemetry is enabled or not. Coder collects anonymized usage data to hel
### --trace
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | bool |
| Environment | $CODER_TRACE_ENABLE |
| YAML | introspection.tracing.enable |
@@ -708,7 +707,7 @@ Whether application tracing data is collected. It exports to a backend configure
### --trace-honeycomb-api-key
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string |
| Environment | $CODER_TRACE_HONEYCOMB_API_KEY |
@@ -717,7 +716,7 @@ Enables trace exporting to Honeycomb.io using the provided API Key.
### --trace-logs
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_TRACE_LOGS |
| YAML | introspection.tracing.captureLogs |
@@ -727,7 +726,7 @@ Enables capturing of logs as events in traces. This is useful for debugging, but
### --provisioner-daemons
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | int |
| Environment | $CODER_PROVISIONER_DAEMONS |
| YAML | provisioning.daemons |
@@ -738,7 +737,7 @@ Number of provisioner daemons to create on start. If builds are stuck in queued
### --provisioner-daemon-poll-interval
| | |
-| ----------- | ---------------------------------------------------- |
+|-------------|------------------------------------------------------|
| Type | duration |
| Environment | $CODER_PROVISIONER_DAEMON_POLL_INTERVAL |
| YAML | provisioning.daemonPollInterval |
@@ -749,7 +748,7 @@ Deprecated and ignored.
### --provisioner-daemon-poll-jitter
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | duration |
| Environment | $CODER_PROVISIONER_DAEMON_POLL_JITTER |
| YAML | provisioning.daemonPollJitter |
@@ -760,7 +759,7 @@ Deprecated and ignored.
### --provisioner-force-cancel-interval
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | duration |
| Environment | $CODER_PROVISIONER_FORCE_CANCEL_INTERVAL |
| YAML | provisioning.forceCancelInterval |
@@ -771,7 +770,7 @@ Time to force cancel provisioning tasks that are stuck.
### --provisioner-daemon-psk
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string |
| Environment | $CODER_PROVISIONER_DAEMON_PSK |
@@ -780,17 +779,17 @@ Pre-shared key to authenticate external provisioner daemons to Coder server.
### -l, --log-filter
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string-array |
| Environment | $CODER_LOG_FILTER |
| YAML | introspection.logging.filter |
-Filter debug logs by matching against a given regex. Use .\* to match all debug logs.
+Filter debug logs by matching against a given regex. Use .* to match all debug logs.
### --log-human
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | string |
| Environment | $CODER_LOGGING_HUMAN |
| YAML | introspection.logging.humanPath |
@@ -801,7 +800,7 @@ Output human-readable logs to a given file.
### --log-json
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string |
| Environment | $CODER_LOGGING_JSON |
| YAML | introspection.logging.jsonPath |
@@ -811,7 +810,7 @@ Output JSON logs to a given file.
### --log-stackdriver
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | string |
| Environment | $CODER_LOGGING_STACKDRIVER |
| YAML | introspection.logging.stackdriverPath |
@@ -821,7 +820,7 @@ Output Stackdriver compatible logs to a given file.
### --enable-terraform-debug-mode
| | |
-| ----------- | ----------------------------------------------------------- |
+|-------------|-------------------------------------------------------------|
| Type | bool |
| Environment | $CODER_ENABLE_TERRAFORM_DEBUG_MODE |
| YAML | introspection.logging.enableTerraformDebugMode |
@@ -832,7 +831,7 @@ Allow administrators to enable Terraform debug output.
### --additional-csp-policy
| | |
-| ----------- | ------------------------------------------------ |
+|-------------|--------------------------------------------------|
| Type | string-array |
| Environment | $CODER_ADDITIONAL_CSP_POLICY |
| YAML | networking.http.additionalCSPPolicy |
@@ -842,7 +841,7 @@ Coder configures a Content Security Policy (CSP) to protect against XSS attacks.
### --dangerous-allow-path-app-sharing
| | |
-| ----------- | ---------------------------------------------------- |
+|-------------|------------------------------------------------------|
| Type | bool |
| Environment | $CODER_DANGEROUS_ALLOW_PATH_APP_SHARING |
@@ -851,7 +850,7 @@ Allow workspace apps that are not served from subdomains to be shared. Path-base
### --dangerous-allow-path-app-site-owner-access
| | |
-| ----------- | -------------------------------------------------------------- |
+|-------------|----------------------------------------------------------------|
| Type | bool |
| Environment | $CODER_DANGEROUS_ALLOW_PATH_APP_SITE_OWNER_ACCESS |
@@ -860,17 +859,17 @@ Allow site-owners to access workspace apps from workspaces they do not own. Owne
### --experiments
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | string-array |
| Environment | $CODER_EXPERIMENTS |
| YAML | experiments |
-Enable one or more experiments. These are not ready for production. Separate multiple experiments with commas, or enter '\*' to opt-in to all available experiments.
+Enable one or more experiments. These are not ready for production. Separate multiple experiments with commas, or enter '*' to opt-in to all available experiments.
### --update-check
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | bool |
| Environment | $CODER_UPDATE_CHECK |
| YAML | updateCheck |
@@ -881,7 +880,7 @@ Periodically check for new releases of Coder and inform the owner. The check is
### --max-token-lifetime
| | |
-| ----------- | --------------------------------------------- |
+|-------------|-----------------------------------------------|
| Type | duration |
| Environment | $CODER_MAX_TOKEN_LIFETIME |
| YAML | networking.http.maxTokenLifetime |
@@ -892,7 +891,7 @@ The maximum lifetime duration users can specify when creating an API token.
### --default-token-lifetime
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | duration |
| Environment | $CODER_DEFAULT_TOKEN_LIFETIME |
| YAML | defaultTokenLifetime |
@@ -903,7 +902,7 @@ The default lifetime duration for API tokens. This value is used when creating a
### --swagger-enable
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | bool |
| Environment | $CODER_SWAGGER_ENABLE |
| YAML | enableSwagger |
@@ -913,7 +912,7 @@ Expose the swagger endpoint via /swagger.
### --proxy-trusted-headers
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string-array |
| Environment | $CODER_PROXY_TRUSTED_HEADERS |
| YAML | networking.proxyTrustedHeaders |
@@ -923,7 +922,7 @@ Headers to trust for forwarding IP addresses. e.g. Cf-Connecting-Ip, True-Client
### --proxy-trusted-origins
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | string-array |
| Environment | $CODER_PROXY_TRUSTED_ORIGINS |
| YAML | networking.proxyTrustedOrigins |
@@ -933,7 +932,7 @@ Origin addresses to respect "proxy-trusted-headers". e.g. 192.168.1.0/24.
### --cache-dir
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_CACHE_DIRECTORY |
| YAML | cacheDir |
@@ -944,7 +943,7 @@ The directory to cache temporary files. If unspecified and $CACHE_DIRECTORY is s
### --postgres-url
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_PG_CONNECTION_URL |
@@ -953,7 +952,7 @@ URL of a PostgreSQL database. If empty, PostgreSQL binaries will be downloaded f
### --postgres-auth
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | password\|awsiamrds |
| Environment | $CODER_PG_AUTH |
| YAML | pgAuth |
@@ -964,7 +963,7 @@ Type of auth to use when connecting to postgres. For AWS RDS, using IAM authenti
### --secure-auth-cookie
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | bool |
| Environment | $CODER_SECURE_AUTH_COOKIE |
| YAML | networking.secureAuthCookie |
@@ -974,7 +973,7 @@ Controls if the 'Secure' property is set on browser session cookies.
### --terms-of-service-url
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_TERMS_OF_SERVICE_URL |
| YAML | termsOfServiceURL |
@@ -984,7 +983,7 @@ A URL to an external Terms of Service that must be accepted by users when loggin
### --strict-transport-security
| | |
-| ----------- | --------------------------------------------------- |
+|-------------|-----------------------------------------------------|
| Type | int |
| Environment | $CODER_STRICT_TRANSPORT_SECURITY |
| YAML | networking.tls.strictTransportSecurity |
@@ -995,7 +994,7 @@ Controls if the 'Strict-Transport-Security' header is set on all static file res
### --strict-transport-security-options
| | |
-| ----------- | ---------------------------------------------------------- |
+|-------------|------------------------------------------------------------|
| Type | string-array |
| Environment | $CODER_STRICT_TRANSPORT_SECURITY_OPTIONS |
| YAML | networking.tls.strictTransportSecurityOptions |
@@ -1005,7 +1004,7 @@ Two optional fields can be set in the Strict-Transport-Security header; 'include
### --ssh-keygen-algorithm
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_SSH_KEYGEN_ALGORITHM |
| YAML | sshKeygenAlgorithm |
@@ -1016,7 +1015,7 @@ The algorithm to use for generating ssh keys. Accepted values are "ed25519", "ec
### --browser-only
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | bool |
| Environment | $CODER_BROWSER_ONLY |
| YAML | networking.browserOnly |
@@ -1026,7 +1025,7 @@ Whether Coder only allows connections to workspaces via the browser.
### --scim-auth-header
| | |
-| ----------- | ------------------------------------ |
+|-------------|--------------------------------------|
| Type | string |
| Environment | $CODER_SCIM_AUTH_HEADER |
@@ -1035,7 +1034,7 @@ Enables SCIM and sets the authentication header for the built-in SCIM server. Ne
### --external-token-encryption-keys
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | string-array |
| Environment | $CODER_EXTERNAL_TOKEN_ENCRYPTION_KEYS |
@@ -1044,7 +1043,7 @@ Encrypt OIDC and Git authentication tokens with AES-256-GCM in the database. The
### --disable-path-apps
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_PATH_APPS |
| YAML | disablePathApps |
@@ -1054,7 +1053,7 @@ Disable workspace apps that are not served from subdomains. Path-based apps can
### --disable-owner-workspace-access
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_OWNER_WORKSPACE_ACCESS |
| YAML | disableOwnerWorkspaceAccess |
@@ -1064,7 +1063,7 @@ Remove the permission for the 'owner' role to have workspace execution on all wo
### --session-duration
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | duration |
| Environment | $CODER_SESSION_DURATION |
| YAML | networking.http.sessionDuration |
@@ -1075,7 +1074,7 @@ The token expiry duration for browser sessions. Sessions may last longer if they
### --disable-session-expiry-refresh
| | |
-| ----------- | -------------------------------------------------------- |
+|-------------|----------------------------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_SESSION_EXPIRY_REFRESH |
| YAML | networking.http.disableSessionExpiryRefresh |
@@ -1085,7 +1084,7 @@ Disable automatic session expiry bumping due to activity. This forces all sessio
### --disable-password-auth
| | |
-| ----------- | ------------------------------------------------ |
+|-------------|--------------------------------------------------|
| Type | bool |
| Environment | $CODER_DISABLE_PASSWORD_AUTH |
| YAML | networking.http.disablePasswordAuth |
@@ -1095,7 +1094,7 @@ Disable password authentication. This is recommended for security purposes in pr
### -c, --config
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | yaml-config-path |
| Environment | $CODER_CONFIG_PATH |
@@ -1104,7 +1103,7 @@ Specify a YAML file to load configuration from.
### --ssh-hostname-prefix
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_SSH_HOSTNAME_PREFIX |
| YAML | client.sshHostnamePrefix |
@@ -1115,7 +1114,7 @@ The SSH deployment prefix is used in the Host of the ssh config.
### --ssh-config-options
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string-array |
| Environment | $CODER_SSH_CONFIG_OPTIONS |
| YAML | client.sshConfigOptions |
@@ -1125,7 +1124,7 @@ These SSH config options will override the default SSH config options. Provide o
### --cli-upgrade-message
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_CLI_UPGRADE_MESSAGE |
| YAML | client.cliUpgradeMessage |
@@ -1135,7 +1134,7 @@ The upgrade message to display to users when a client/server mismatch is detecte
### --write-config
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Write out the current server config as YAML to stdout.
@@ -1143,7 +1142,7 @@ The upgrade message to display to users when a client/server mismatch is detecte
### --support-links
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | struct[[]codersdk.LinkConfig] |
| Environment | $CODER_SUPPORT_LINKS |
| YAML | supportLinks |
@@ -1153,7 +1152,7 @@ Support links to display in the top right drop down menu.
### --proxy-health-interval
| | |
-| ----------- | ------------------------------------------------ |
+|-------------|--------------------------------------------------|
| Type | duration |
| Environment | $CODER_PROXY_HEALTH_INTERVAL |
| YAML | networking.http.proxyHealthInterval |
@@ -1164,18 +1163,18 @@ The interval in which coderd should be checking the status of workspace proxies.
### --default-quiet-hours-schedule
| | |
-| ----------- | ------------------------------------------------------------- |
+|-------------|---------------------------------------------------------------|
| Type | string |
| Environment | $CODER_QUIET_HOURS_DEFAULT_SCHEDULE |
| YAML | userQuietHoursSchedule.defaultQuietHoursSchedule |
-| Default | CRON_TZ=UTC 0 0 \* \* \* |
+| Default     | CRON_TZ=UTC 0 0 * * *                                         |
-The default daily cron schedule applied to users that haven't set a custom quiet hours schedule themselves. The quiet hours schedule determines when workspaces will be force stopped due to the template's autostop requirement, and will round the max deadline up to be within the user's quiet hours window (or default). The format is the same as the standard cron format, but the day-of-month, month and day-of-week must be \*. Only one hour and minute can be specified (ranges or comma separated values are not supported).
+The default daily cron schedule applied to users that haven't set a custom quiet hours schedule themselves. The quiet hours schedule determines when workspaces will be force stopped due to the template's autostop requirement, and will round the max deadline up to be within the user's quiet hours window (or default). The format is the same as the standard cron format, but the day-of-month, month and day-of-week must be *. Only one hour and minute can be specified (ranges or comma separated values are not supported).
### --allow-custom-quiet-hours
| | |
-| ----------- | --------------------------------------------------------- |
+|-------------|-----------------------------------------------------------|
| Type | bool |
| Environment | $CODER_ALLOW_CUSTOM_QUIET_HOURS |
| YAML | userQuietHoursSchedule.allowCustomQuietHours |
@@ -1186,7 +1185,7 @@ Allow users to set their own quiet hours schedule for workspaces to stop in (dep
### --web-terminal-renderer
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_WEB_TERMINAL_RENDERER |
| YAML | client.webTerminalRenderer |
@@ -1197,7 +1196,7 @@ The renderer to use when opening a web terminal. Valid values are 'canvas', 'web
### --allow-workspace-renames
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | bool |
| Environment | $CODER_ALLOW_WORKSPACE_RENAMES |
| YAML | allowWorkspaceRenames |
@@ -1208,7 +1207,7 @@ DEPRECATED: Allow users to rename their workspaces. Use only for temporary compa
### --health-check-refresh
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | duration |
| Environment | $CODER_HEALTH_CHECK_REFRESH |
| YAML | introspection.healthcheck.refresh |
@@ -1219,7 +1218,7 @@ Refresh interval for healthchecks.
### --health-check-threshold-database
| | |
-| ----------- | -------------------------------------------------------- |
+|-------------|----------------------------------------------------------|
| Type | duration |
| Environment | $CODER_HEALTH_CHECK_THRESHOLD_DATABASE |
| YAML | introspection.healthcheck.thresholdDatabase |
@@ -1230,7 +1229,7 @@ The threshold for the database health check. If the median latency of the databa
### --email-from
| | |
-| ----------- | ------------------------------ |
+|-------------|--------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_FROM |
| YAML | email.from |
@@ -1240,7 +1239,7 @@ The sender's address to use.
### --email-smarthost
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_SMARTHOST |
| YAML | email.smarthost |
@@ -1250,7 +1249,7 @@ The intermediary SMTP host through which emails are sent.
### --email-hello
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_HELLO |
| YAML | email.hello |
@@ -1261,7 +1260,7 @@ The hostname identifying the SMTP server.
### --email-force-tls
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | bool |
| Environment | $CODER_EMAIL_FORCE_TLS |
| YAML | email.forceTLS |
@@ -1272,7 +1271,7 @@ Force a TLS connection to the configured SMTP smarthost.
### --email-auth-identity
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_AUTH_IDENTITY |
| YAML | email.emailAuth.identity |
@@ -1282,7 +1281,7 @@ Identity to use with PLAIN authentication.
### --email-auth-username
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_AUTH_USERNAME |
| YAML | email.emailAuth.username |
@@ -1292,7 +1291,7 @@ Username to use with PLAIN/LOGIN authentication.
### --email-auth-password
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_AUTH_PASSWORD |
@@ -1301,7 +1300,7 @@ Password to use with PLAIN/LOGIN authentication.
### --email-auth-password-file
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_AUTH_PASSWORD_FILE |
| YAML | email.emailAuth.passwordFile |
@@ -1311,7 +1310,7 @@ File from which to load password for use with PLAIN/LOGIN authentication.
### --email-tls-starttls
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | bool |
| Environment | $CODER_EMAIL_TLS_STARTTLS |
| YAML | email.emailTLS.startTLS |
@@ -1321,7 +1320,7 @@ Enable STARTTLS to upgrade insecure SMTP connections using TLS.
### --email-tls-server-name
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_TLS_SERVERNAME |
| YAML | email.emailTLS.serverName |
@@ -1331,7 +1330,7 @@ Server name to verify against the target certificate.
### --email-tls-skip-verify
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | bool |
| Environment | $CODER_EMAIL_TLS_SKIPVERIFY |
| YAML | email.emailTLS.insecureSkipVerify |
@@ -1341,7 +1340,7 @@ Skip verification of the target server's certificate (insecure).
### --email-tls-ca-cert-file
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_TLS_CACERTFILE |
| YAML | email.emailTLS.caCertFile |
@@ -1351,7 +1350,7 @@ CA certificate file to use.
### --email-tls-cert-file
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_TLS_CERTFILE |
| YAML | email.emailTLS.certFile |
@@ -1361,7 +1360,7 @@ Certificate file to use.
### --email-tls-cert-key-file
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_EMAIL_TLS_CERTKEYFILE |
| YAML | email.emailTLS.certKeyFile |
@@ -1371,7 +1370,7 @@ Certificate key file to use.
### --notifications-method
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_METHOD |
| YAML | notifications.method |
@@ -1382,7 +1381,7 @@ Which delivery method to use (available options: 'smtp', 'webhook').
### --notifications-dispatch-timeout
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | duration |
| Environment | $CODER_NOTIFICATIONS_DISPATCH_TIMEOUT |
| YAML | notifications.dispatchTimeout |
@@ -1393,7 +1392,7 @@ How long to wait while a notification is being sent before giving up.
### --notifications-email-from
| | |
-| ----------- | -------------------------------------------- |
+|-------------|----------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_FROM |
| YAML | notifications.email.from |
@@ -1403,7 +1402,7 @@ The sender's address to use.
### --notifications-email-smarthost
| | |
-| ----------- | ------------------------------------------------- |
+|-------------|---------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_SMARTHOST |
| YAML | notifications.email.smarthost |
@@ -1413,7 +1412,7 @@ The intermediary SMTP host through which emails are sent.
### --notifications-email-hello
| | |
-| ----------- | --------------------------------------------- |
+|-------------|-----------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_HELLO |
| YAML | notifications.email.hello |
@@ -1423,7 +1422,7 @@ The hostname identifying the SMTP server.
### --notifications-email-force-tls
| | |
-| ----------- | ------------------------------------------------- |
+|-------------|---------------------------------------------------|
| Type | bool |
| Environment | $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS |
| YAML | notifications.email.forceTLS |
@@ -1433,7 +1432,7 @@ Force a TLS connection to the configured SMTP smarthost.
### --notifications-email-auth-identity
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY |
| YAML | notifications.email.emailAuth.identity |
@@ -1443,7 +1442,7 @@ Identity to use with PLAIN authentication.
### --notifications-email-auth-username
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME |
| YAML | notifications.email.emailAuth.username |
@@ -1453,7 +1452,7 @@ Username to use with PLAIN/LOGIN authentication.
### --notifications-email-auth-password
| | |
-| ----------- | ----------------------------------------------------- |
+|-------------|-------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD |
@@ -1462,7 +1461,7 @@ Password to use with PLAIN/LOGIN authentication.
### --notifications-email-auth-password-file
| | |
-| ----------- | ---------------------------------------------------------- |
+|-------------|------------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE |
| YAML | notifications.email.emailAuth.passwordFile |
@@ -1472,7 +1471,7 @@ File from which to load password for use with PLAIN/LOGIN authentication.
### --notifications-email-tls-starttls
| | |
-| ----------- | ---------------------------------------------------- |
+|-------------|------------------------------------------------------|
| Type | bool |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS |
| YAML | notifications.email.emailTLS.startTLS |
@@ -1482,7 +1481,7 @@ Enable STARTTLS to upgrade insecure SMTP connections using TLS.
### --notifications-email-tls-server-name
| | |
-| ----------- | ------------------------------------------------------ |
+|-------------|--------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME |
| YAML | notifications.email.emailTLS.serverName |
@@ -1492,7 +1491,7 @@ Server name to verify against the target certificate.
### --notifications-email-tls-skip-verify
| | |
-| ----------- | ------------------------------------------------------------ |
+|-------------|--------------------------------------------------------------|
| Type | bool |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY |
| YAML | notifications.email.emailTLS.insecureSkipVerify |
@@ -1502,7 +1501,7 @@ Skip verification of the target server's certificate (insecure).
### --notifications-email-tls-ca-cert-file
| | |
-| ----------- | ------------------------------------------------------ |
+|-------------|--------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_CACERTFILE |
| YAML | notifications.email.emailTLS.caCertFile |
@@ -1512,7 +1511,7 @@ CA certificate file to use.
### --notifications-email-tls-cert-file
| | |
-| ----------- | ---------------------------------------------------- |
+|-------------|------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE |
| YAML | notifications.email.emailTLS.certFile |
@@ -1522,7 +1521,7 @@ Certificate file to use.
### --notifications-email-tls-cert-key-file
| | |
-| ----------- | ------------------------------------------------------- |
+|-------------|---------------------------------------------------------|
| Type | string |
| Environment | $CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE |
| YAML | notifications.email.emailTLS.certKeyFile |
@@ -1532,7 +1531,7 @@ Certificate key file to use.
### --notifications-webhook-endpoint
| | |
-| ----------- | -------------------------------------------------- |
+|-------------|----------------------------------------------------|
| Type | url |
| Environment | $CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT |
| YAML | notifications.webhook.endpoint |
@@ -1542,7 +1541,7 @@ The endpoint to which to send webhooks.
### --notifications-max-send-attempts
| | |
-| ----------- | --------------------------------------------------- |
+|-------------|-----------------------------------------------------|
| Type | int |
| Environment | $CODER_NOTIFICATIONS_MAX_SEND_ATTEMPTS |
| YAML | notifications.maxSendAttempts |
diff --git a/docs/reference/cli/server_create-admin-user.md b/docs/reference/cli/server_create-admin-user.md
index 611d95094c92e..361465c896dac 100644
--- a/docs/reference/cli/server_create-admin-user.md
+++ b/docs/reference/cli/server_create-admin-user.md
@@ -1,5 +1,4 @@
-
# server create-admin-user
Create a new admin user with the given username, email and password and adds it to every organization.
@@ -15,7 +14,7 @@ coder server create-admin-user [flags]
### --postgres-url
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_PG_CONNECTION_URL |
@@ -24,7 +23,7 @@ URL of a PostgreSQL database. If empty, the built-in PostgreSQL deployment will
### --postgres-connection-auth
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | password\|awsiamrds |
| Environment | $CODER_PG_CONNECTION_AUTH |
| Default | password |
@@ -34,7 +33,7 @@ Type of auth to use when connecting to postgres.
### --ssh-keygen-algorithm
| | |
-| ----------- | ---------------------------------------- |
+|-------------|------------------------------------------|
| Type | string |
| Environment | $CODER_SSH_KEYGEN_ALGORITHM |
| Default | ed25519 |
@@ -44,7 +43,7 @@ The algorithm to use for generating ssh keys. Accepted values are "ed25519", "ec
### --username
| | |
-| ----------- | ---------------------------- |
+|-------------|------------------------------|
| Type | string |
| Environment | $CODER_USERNAME |
@@ -53,7 +52,7 @@ The username of the new user. If not specified, you will be prompted via stdin.
### --email
| | |
-| ----------- | ------------------------- |
+|-------------|---------------------------|
| Type | string |
| Environment | $CODER_EMAIL |
@@ -62,7 +61,7 @@ The email of the new user. If not specified, you will be prompted via stdin.
### --password
| | |
-| ----------- | ---------------------------- |
+|-------------|------------------------------|
| Type | string |
| Environment | $CODER_PASSWORD |
@@ -71,7 +70,7 @@ The password of the new user. If not specified, you will be prompted via stdin.
### --raw-url
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output the raw connection URL instead of a psql command.
diff --git a/docs/reference/cli/server_dbcrypt.md b/docs/reference/cli/server_dbcrypt.md
index be06560a275ca..f8d638a05ad53 100644
--- a/docs/reference/cli/server_dbcrypt.md
+++ b/docs/reference/cli/server_dbcrypt.md
@@ -1,5 +1,4 @@
-
# server dbcrypt
Manage database encryption.
@@ -13,7 +12,7 @@ coder server dbcrypt
## Subcommands
| Name | Purpose |
-| --------------------------------------------------- | ----------------------------------------------------------------------------- |
+|-----------------------------------------------------|-------------------------------------------------------------------------------|
| [decrypt](./server_dbcrypt_decrypt.md) | Decrypt a previously encrypted database. |
| [delete](./server_dbcrypt_delete.md) | Delete all encrypted data from the database. THIS IS A DESTRUCTIVE OPERATION. |
| [rotate](./server_dbcrypt_rotate.md) | Rotate database encryption keys. |
diff --git a/docs/reference/cli/server_dbcrypt_decrypt.md b/docs/reference/cli/server_dbcrypt_decrypt.md
index 69780471817b1..5126ef0fccb25 100644
--- a/docs/reference/cli/server_dbcrypt_decrypt.md
+++ b/docs/reference/cli/server_dbcrypt_decrypt.md
@@ -1,5 +1,4 @@
-
# server dbcrypt decrypt
Decrypt a previously encrypted database.
@@ -15,7 +14,7 @@ coder server dbcrypt decrypt [flags]
### --postgres-url
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_PG_CONNECTION_URL |
@@ -24,7 +23,7 @@ The connection URL for the Postgres database.
### --postgres-connection-auth
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | password\|awsiamrds |
| Environment | $CODER_PG_CONNECTION_AUTH |
| Default | password |
@@ -34,7 +33,7 @@ Type of auth to use when connecting to postgres.
### --keys
| | |
-| ----------- | ---------------------------------------------------------- |
+|-------------|------------------------------------------------------------|
| Type | string-array |
| Environment | $CODER_EXTERNAL_TOKEN_ENCRYPTION_DECRYPT_KEYS |
@@ -43,7 +42,7 @@ Keys required to decrypt existing data. Must be a comma-separated list of base64
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/server_dbcrypt_delete.md b/docs/reference/cli/server_dbcrypt_delete.md
index e33560d2ae990..a5e7d16715ecf 100644
--- a/docs/reference/cli/server_dbcrypt_delete.md
+++ b/docs/reference/cli/server_dbcrypt_delete.md
@@ -1,12 +1,11 @@
-
# server dbcrypt delete
Delete all encrypted data from the database. THIS IS A DESTRUCTIVE OPERATION.
Aliases:
-- rm
+* rm
## Usage
@@ -19,7 +18,7 @@ coder server dbcrypt delete [flags]
### --postgres-url
| | |
-| ----------- | ---------------------------------------------------------- |
+|-------------|------------------------------------------------------------|
| Type | string |
| Environment | $CODER_EXTERNAL_TOKEN_ENCRYPTION_POSTGRES_URL |
@@ -28,7 +27,7 @@ The connection URL for the Postgres database.
### --postgres-connection-auth
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | password\|awsiamrds |
| Environment | $CODER_PG_CONNECTION_AUTH |
| Default | password |
@@ -38,7 +37,7 @@ Type of auth to use when connecting to postgres.
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/server_dbcrypt_rotate.md b/docs/reference/cli/server_dbcrypt_rotate.md
index 02aaa1451f004..322a909a087b8 100644
--- a/docs/reference/cli/server_dbcrypt_rotate.md
+++ b/docs/reference/cli/server_dbcrypt_rotate.md
@@ -1,5 +1,4 @@
-
# server dbcrypt rotate
Rotate database encryption keys.
@@ -15,7 +14,7 @@ coder server dbcrypt rotate [flags]
### --postgres-url
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | string |
| Environment | $CODER_PG_CONNECTION_URL |
@@ -24,7 +23,7 @@ The connection URL for the Postgres database.
### --postgres-connection-auth
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | password\|awsiamrds |
| Environment | $CODER_PG_CONNECTION_AUTH |
| Default | password |
@@ -34,7 +33,7 @@ Type of auth to use when connecting to postgres.
### --new-key
| | |
-| ----------- | ------------------------------------------------------------- |
+|-------------|---------------------------------------------------------------|
| Type | string |
| Environment | $CODER_EXTERNAL_TOKEN_ENCRYPTION_ENCRYPT_NEW_KEY |
@@ -43,7 +42,7 @@ The new external token encryption key. Must be base64-encoded.
### --old-keys
| | |
-| ----------- | -------------------------------------------------------------- |
+|-------------|----------------------------------------------------------------|
| Type | string-array |
| Environment | $CODER_EXTERNAL_TOKEN_ENCRYPTION_ENCRYPT_OLD_KEYS |
@@ -52,7 +51,7 @@ The old external token encryption keys. Must be a comma-separated list of base64
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/server_postgres-builtin-serve.md b/docs/reference/cli/server_postgres-builtin-serve.md
index dda91692a0f78..55d8ad2a8d269 100644
--- a/docs/reference/cli/server_postgres-builtin-serve.md
+++ b/docs/reference/cli/server_postgres-builtin-serve.md
@@ -1,5 +1,4 @@
-
# server postgres-builtin-serve
Run the built-in PostgreSQL deployment.
@@ -15,7 +14,7 @@ coder server postgres-builtin-serve [flags]
### --raw-url
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output the raw connection URL instead of a psql command.
diff --git a/docs/reference/cli/server_postgres-builtin-url.md b/docs/reference/cli/server_postgres-builtin-url.md
index 8f3eb73307055..f8fdebb042e4a 100644
--- a/docs/reference/cli/server_postgres-builtin-url.md
+++ b/docs/reference/cli/server_postgres-builtin-url.md
@@ -1,5 +1,4 @@
-
# server postgres-builtin-url
Output the connection URL for the built-in PostgreSQL deployment.
@@ -15,7 +14,7 @@ coder server postgres-builtin-url [flags]
### --raw-url
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output the raw connection URL instead of a psql command.
diff --git a/docs/reference/cli/show.md b/docs/reference/cli/show.md
index c3a81f9e2c83f..87c527ed939f9 100644
--- a/docs/reference/cli/show.md
+++ b/docs/reference/cli/show.md
@@ -1,5 +1,4 @@
-
# show
Display details of a workspace's resources and agents
diff --git a/docs/reference/cli/speedtest.md b/docs/reference/cli/speedtest.md
index 664ac2d3f383e..d17125ad2abcb 100644
--- a/docs/reference/cli/speedtest.md
+++ b/docs/reference/cli/speedtest.md
@@ -1,5 +1,4 @@
-
# speedtest
Run upload and download tests from your machine to a workspace
@@ -15,7 +14,7 @@ coder speedtest [flags]
### -d, --direct
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Specifies whether to wait for a direct connection before testing speed.
@@ -23,7 +22,7 @@ Specifies whether to wait for a direct connection before testing speed.
### --direction
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | up\|down |
| Default | down |
@@ -32,7 +31,7 @@ Specifies whether to run in reverse mode where the client receives and the serve
### -t, --time
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 5s |
@@ -41,7 +40,7 @@ Specifies the duration to monitor traffic.
### --pcap-file
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies a file to write a network capture to.
@@ -49,7 +48,7 @@ Specifies a file to write a network capture to.
### -c, --column
| | |
-| ------- | ----------------------------------- |
+|---------|-------------------------------------|
| Type | [Interval\|Throughput] |
| Default | Interval,Throughput |
@@ -58,7 +57,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/ssh.md b/docs/reference/cli/ssh.md
index 477c706775e87..72513e0c9ecdc 100644
--- a/docs/reference/cli/ssh.md
+++ b/docs/reference/cli/ssh.md
@@ -1,5 +1,4 @@
-
# ssh
Start a shell into a workspace
@@ -15,7 +14,7 @@ coder ssh [flags]
### --stdio
| | |
-| ----------- | ----------------------------- |
+|-------------|-------------------------------|
| Type | bool |
| Environment | $CODER_SSH_STDIO |
@@ -24,7 +23,7 @@ Specifies whether to emit SSH output over stdin/stdout.
### -A, --forward-agent
| | |
-| ----------- | ------------------------------------- |
+|-------------|---------------------------------------|
| Type | bool |
| Environment | $CODER_SSH_FORWARD_AGENT |
@@ -33,7 +32,7 @@ Specifies whether to forward the SSH agent specified in $SSH_AUTH_SOCK.
### -G, --forward-gpg
| | |
-| ----------- | ----------------------------------- |
+|-------------|-------------------------------------|
| Type | bool |
| Environment | $CODER_SSH_FORWARD_GPG |
@@ -42,7 +41,7 @@ Specifies whether to forward the GPG agent. Unsupported on Windows workspaces, b
### --identity-agent
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string |
| Environment | $CODER_SSH_IDENTITY_AGENT |
@@ -51,7 +50,7 @@ Specifies which identity agent to use (overrides $SSH_AUTH_SOCK), forward agent
### --workspace-poll-interval
| | |
-| ----------- | ------------------------------------------- |
+|-------------|---------------------------------------------|
| Type | duration |
| Environment | $CODER_WORKSPACE_POLL_INTERVAL |
| Default | 1m |
@@ -61,7 +60,7 @@ Specifies how often to poll for workspace automated shutdown.
### --wait
| | |
-| ----------- | ---------------------------- |
+|-------------|------------------------------|
| Type | yes\|no\|auto |
| Environment | $CODER_SSH_WAIT |
| Default | auto |
@@ -71,7 +70,7 @@ Specifies whether or not to wait for the startup script to finish executing. Aut
### --no-wait
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | bool |
| Environment | $CODER_SSH_NO_WAIT |
@@ -80,7 +79,7 @@ Enter workspace immediately after the agent has connected. This is the default i
### -l, --log-dir
| | |
-| ----------- | ------------------------------- |
+|-------------|---------------------------------|
| Type | string |
| Environment | $CODER_SSH_LOG_DIR |
@@ -89,7 +88,7 @@ Specify the directory containing SSH diagnostic log files.
### -R, --remote-forward
| | |
-| ----------- | -------------------------------------- |
+|-------------|----------------------------------------|
| Type | string-array |
| Environment | $CODER_SSH_REMOTE_FORWARD |
@@ -98,7 +97,7 @@ Enable remote port forwarding (remote_port:local_address:local_port).
### -e, --env
| | |
-| ----------- | --------------------------- |
+|-------------|-----------------------------|
| Type | string-array |
| Environment | $CODER_SSH_ENV |
@@ -107,7 +106,7 @@ Set environment variable(s) for session (key1=value1,key2=value2,...).
### --disable-autostart
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | bool |
| Environment | $CODER_SSH_DISABLE_AUTOSTART |
| Default | false |
diff --git a/docs/reference/cli/start.md b/docs/reference/cli/start.md
index 9be64d5a83d85..1ab6df5a9c891 100644
--- a/docs/reference/cli/start.md
+++ b/docs/reference/cli/start.md
@@ -1,5 +1,4 @@
-
# start
Start a workspace
@@ -15,7 +14,7 @@ coder start [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -23,7 +22,7 @@ Bypass prompts.
### --build-option
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string-array |
| Environment | $CODER_BUILD_OPTION |
@@ -32,7 +31,7 @@ Build option value in the format "name=value".
### --build-options
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Prompt for one-time build options defined with ephemeral parameters.
@@ -40,7 +39,7 @@ Prompt for one-time build options defined with ephemeral parameters.
### --ephemeral-parameter
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string-array |
| Environment | $CODER_EPHEMERAL_PARAMETER |
@@ -49,7 +48,7 @@ Set the value of ephemeral parameters defined in the template. The format is "na
### --prompt-ephemeral-parameters
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMPT_EPHEMERAL_PARAMETERS |
@@ -58,7 +57,7 @@ Prompt to set values of ephemeral parameters defined in the template. If a value
### --parameter
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER |
@@ -67,7 +66,7 @@ Rich parameter value in the format "name=value".
### --rich-parameter-file
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_RICH_PARAMETER_FILE |
@@ -76,7 +75,7 @@ Specify a file path with values for rich parameters defined in the template. The
### --parameter-default
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER_DEFAULT |
@@ -85,7 +84,7 @@ Rich parameter default values in the format "name=value".
### --always-prompt
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Always prompt all parameters. Does not pull parameter values from existing workspace.
diff --git a/docs/reference/cli/stat.md b/docs/reference/cli/stat.md
index 70da8dee47f7a..c84c56ee5afdc 100644
--- a/docs/reference/cli/stat.md
+++ b/docs/reference/cli/stat.md
@@ -1,5 +1,4 @@
-
# stat
Show resource usage for the current workspace.
@@ -13,7 +12,7 @@ coder stat [flags]
## Subcommands
| Name | Purpose |
-| ----------------------------------- | -------------------------------- |
+|-------------------------------------|----------------------------------|
| [cpu](./stat_cpu.md) | Show CPU usage, in cores. |
| [mem](./stat_mem.md) | Show memory usage, in gigabytes. |
| [disk](./stat_disk.md) | Show disk usage, in gigabytes. |
@@ -23,7 +22,7 @@ coder stat [flags]
### -c, --column
| | |
-| ------- | -------------------------------------------------------------------------------- |
+|---------|----------------------------------------------------------------------------------|
| Type | [host cpu\|host memory\|home disk\|container cpu\|container memory] |
| Default | host cpu,host memory,home disk,container cpu,container memory |
@@ -32,7 +31,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/stat_cpu.md b/docs/reference/cli/stat_cpu.md
index 8e86ef4ddc7f9..c7013e1683ec4 100644
--- a/docs/reference/cli/stat_cpu.md
+++ b/docs/reference/cli/stat_cpu.md
@@ -1,5 +1,4 @@
-
# stat cpu
Show CPU usage, in cores.
@@ -15,7 +14,7 @@ coder stat cpu [flags]
### --host
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Force host CPU measurement.
@@ -23,7 +22,7 @@ Force host CPU measurement.
### -o, --output
| | |
-| ------- | ----------------------- |
+|---------|-------------------------|
| Type | text\|json |
| Default | text |
diff --git a/docs/reference/cli/stat_disk.md b/docs/reference/cli/stat_disk.md
index 6b5ca22ee5750..4cf80f6075e7d 100644
--- a/docs/reference/cli/stat_disk.md
+++ b/docs/reference/cli/stat_disk.md
@@ -1,5 +1,4 @@
-
# stat disk
Show disk usage, in gigabytes.
@@ -15,7 +14,7 @@ coder stat disk [flags]
### --path
| | |
-| ------- | ------------------- |
+|---------|---------------------|
| Type | string |
| Default | / |
@@ -24,7 +23,7 @@ Path for which to check disk usage.
### --prefix
| | |
-| ------- | --------------------------- |
+|---------|-----------------------------|
| Type | Ki\|Mi\|Gi\|Ti |
| Default | Gi |
@@ -33,7 +32,7 @@ SI Prefix for disk measurement.
### -o, --output
| | |
-| ------- | ----------------------- |
+|---------|-------------------------|
| Type | text\|json |
| Default | text |
diff --git a/docs/reference/cli/stat_mem.md b/docs/reference/cli/stat_mem.md
index 1f8b85d32e5fd..d69ba19ee8d11 100644
--- a/docs/reference/cli/stat_mem.md
+++ b/docs/reference/cli/stat_mem.md
@@ -1,5 +1,4 @@
-
# stat mem
Show memory usage, in gigabytes.
@@ -15,7 +14,7 @@ coder stat mem [flags]
### --host
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Force host memory measurement.
@@ -23,7 +22,7 @@ Force host memory measurement.
### --prefix
| | |
-| ------- | --------------------------- |
+|---------|-----------------------------|
| Type | Ki\|Mi\|Gi\|Ti |
| Default | Gi |
@@ -32,7 +31,7 @@ SI Prefix for memory measurement.
### -o, --output
| | |
-| ------- | ----------------------- |
+|---------|-------------------------|
| Type | text\|json |
| Default | text |
diff --git a/docs/reference/cli/state.md b/docs/reference/cli/state.md
index b0e9ca7433750..ebac28a646895 100644
--- a/docs/reference/cli/state.md
+++ b/docs/reference/cli/state.md
@@ -1,5 +1,4 @@
-
# state
Manually manage Terraform state to fix broken workspaces
@@ -13,6 +12,6 @@ coder state
## Subcommands
| Name | Purpose |
-| ------------------------------------ | --------------------------------------------- |
+|--------------------------------------|-----------------------------------------------|
| [pull](./state_pull.md) | Pull a Terraform state file from a workspace. |
| [push](./state_push.md) | Push a Terraform state file to a workspace. |
diff --git a/docs/reference/cli/state_pull.md b/docs/reference/cli/state_pull.md
index 57009750cf64a..089548ab936b2 100644
--- a/docs/reference/cli/state_pull.md
+++ b/docs/reference/cli/state_pull.md
@@ -1,5 +1,4 @@
-
# state pull
Pull a Terraform state file from a workspace.
@@ -15,7 +14,7 @@ coder state pull [flags] [file]
### -b, --build
| | |
-| ---- | ---------------- |
+|------|------------------|
| Type | int |
Specify a workspace build to target by name. Defaults to latest.
diff --git a/docs/reference/cli/state_push.md b/docs/reference/cli/state_push.md
index c39831acc4992..039b03fc01c2f 100644
--- a/docs/reference/cli/state_push.md
+++ b/docs/reference/cli/state_push.md
@@ -1,5 +1,4 @@
-
# state push
Push a Terraform state file to a workspace.
@@ -15,7 +14,7 @@ coder state push [flags]
### -b, --build
| | |
-| ---- | ---------------- |
+|------|------------------|
| Type | int |
Specify a workspace build to target by name. Defaults to latest.
diff --git a/docs/reference/cli/stop.md b/docs/reference/cli/stop.md
index 65197a2cdbb66..dba81c5cf7e92 100644
--- a/docs/reference/cli/stop.md
+++ b/docs/reference/cli/stop.md
@@ -1,5 +1,4 @@
-
# stop
Stop a workspace
@@ -15,7 +14,7 @@ coder stop [flags]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
diff --git a/docs/reference/cli/support.md b/docs/reference/cli/support.md
index 81bb0509d16ab..b530264f36dd0 100644
--- a/docs/reference/cli/support.md
+++ b/docs/reference/cli/support.md
@@ -1,5 +1,4 @@
-
# support
Commands for troubleshooting issues with a Coder deployment.
@@ -13,5 +12,5 @@ coder support
## Subcommands
| Name | Purpose |
-| ------------------------------------------ | --------------------------------------------------------------------------- |
+|--------------------------------------------|-----------------------------------------------------------------------------|
| [bundle](./support_bundle.md) | Generate a support bundle to troubleshoot issues connecting to a workspace. |
diff --git a/docs/reference/cli/support_bundle.md b/docs/reference/cli/support_bundle.md
index 602d11297ea3d..59b1fa4130deb 100644
--- a/docs/reference/cli/support_bundle.md
+++ b/docs/reference/cli/support_bundle.md
@@ -1,5 +1,4 @@
-
# support bundle
Generate a support bundle to troubleshoot issues connecting to a workspace.
@@ -21,7 +20,7 @@ This command generates a file containing detailed troubleshooting information ab
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -29,7 +28,7 @@ Bypass prompts.
### -O, --output-file
| | |
-| ----------- | ---------------------------------------------- |
+|-------------|------------------------------------------------|
| Type | string |
| Environment | $CODER_SUPPORT_BUNDLE_OUTPUT_FILE |
@@ -38,7 +37,7 @@ File path for writing the generated support bundle. Defaults to coder-support-$(
### --url-override
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | string |
| Environment | $CODER_SUPPORT_BUNDLE_URL_OVERRIDE |
diff --git a/docs/reference/cli/templates.md b/docs/reference/cli/templates.md
index 9f3936daf787f..99052aa6c3e20 100644
--- a/docs/reference/cli/templates.md
+++ b/docs/reference/cli/templates.md
@@ -1,12 +1,11 @@
-
# templates
Manage templates
Aliases:
-- template
+* template
## Usage
@@ -27,7 +26,7 @@ workspaces:
## Subcommands
| Name | Purpose |
-| ------------------------------------------------ | -------------------------------------------------------------------------------- |
+|--------------------------------------------------|----------------------------------------------------------------------------------|
| [create](./templates_create.md) | DEPRECATED: Create a template from the current directory or as specified by flag |
| [edit](./templates_edit.md) | Edit the metadata of a template by name. |
| [init](./templates_init.md) | Get started with a templated template. |
diff --git a/docs/reference/cli/templates_archive.md b/docs/reference/cli/templates_archive.md
index a229222addf88..ef09707e5f323 100644
--- a/docs/reference/cli/templates_archive.md
+++ b/docs/reference/cli/templates_archive.md
@@ -1,5 +1,4 @@
-
# templates archive
Archive unused or failed template versions from a given template(s)
@@ -7,7 +6,7 @@ Archive unused or failed template versions from a given template(s)
## Usage
```console
-coder templates archive [flags] [template-name...]
+coder templates archive [flags] [template-name...]
```
## Options
@@ -15,7 +14,7 @@ coder templates archive [flags] [template-name...]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -23,7 +22,7 @@ Bypass prompts.
### --all
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Include all unused template versions. By default, only failed template versions are archived.
@@ -31,7 +30,7 @@ Include all unused template versions. By default, only failed template versions
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_create.md b/docs/reference/cli/templates_create.md
index 01b153ff2911d..cd3754e383ad5 100644
--- a/docs/reference/cli/templates_create.md
+++ b/docs/reference/cli/templates_create.md
@@ -1,5 +1,4 @@
-
# templates create
DEPRECATED: Create a template from the current directory or as specified by flag
@@ -15,7 +14,7 @@ coder templates create [flags] [name]
### --private
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Disable the default behavior of granting template access to the 'everyone' group. The template permissions must be updated to allow non-admin users to use this template.
@@ -23,7 +22,7 @@ Disable the default behavior of granting template access to the 'everyone' group
### --variables-file
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specify a file path with values for Terraform-managed variables.
@@ -31,7 +30,7 @@ Specify a file path with values for Terraform-managed variables.
### --variable
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Specify a set of values for Terraform-managed variables.
@@ -39,7 +38,7 @@ Specify a set of values for Terraform-managed variables.
### --var
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Alias of --variable.
@@ -47,7 +46,7 @@ Alias of --variable.
### --provisioner-tag
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Specify a set of tags to target provisioner daemons.
@@ -55,7 +54,7 @@ Specify a set of tags to target provisioner daemons.
### --default-ttl
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 24h |
@@ -64,7 +63,7 @@ Specify a default TTL for workspaces created from this template. It is the defau
### --failure-ttl
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -73,7 +72,7 @@ Specify a failure TTL for workspaces created from this template. It is the amoun
### --dormancy-threshold
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -82,7 +81,7 @@ Specify a duration workspaces may be inactive prior to being moved to the dorman
### --dormancy-auto-deletion
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -91,7 +90,7 @@ Specify a duration workspaces may be in the dormant state prior to being deleted
### --require-active-version
| | |
-| ------- | ------------------ |
+|---------|--------------------|
| Type | bool |
| Default | false |
@@ -100,7 +99,7 @@ Requires workspace builds to use the active template version. This setting does
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -108,7 +107,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
@@ -117,7 +116,7 @@ Select which organization (uuid or name) to use.
### -d, --directory
| | |
-| ------- | ------------------- |
+|---------|---------------------|
| Type | string |
| Default | . |
@@ -126,7 +125,7 @@ Specify the directory to create from, use '-' to read tar from stdin.
### --ignore-lockfile
| | |
-| ------- | ------------------ |
+|---------|--------------------|
| Type | bool |
| Default | false |
@@ -135,7 +134,7 @@ Ignore warnings about not having a .terraform.lock.hcl file present in the templ
### -m, --message
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specify a message describing the changes in this version of the template. Messages longer than 72 characters will be displayed as truncated.
diff --git a/docs/reference/cli/templates_delete.md b/docs/reference/cli/templates_delete.md
index 55730c7d609d8..9037a39d2b378 100644
--- a/docs/reference/cli/templates_delete.md
+++ b/docs/reference/cli/templates_delete.md
@@ -1,12 +1,11 @@
-
# templates delete
Delete templates
Aliases:
-- rm
+* rm
## Usage
@@ -19,7 +18,7 @@ coder templates delete [flags] [name...]
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -27,7 +26,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_edit.md b/docs/reference/cli/templates_edit.md
index 81fdc04d1a176..5d9f6f0a55a0d 100644
--- a/docs/reference/cli/templates_edit.md
+++ b/docs/reference/cli/templates_edit.md
@@ -1,5 +1,4 @@
-
# templates edit
Edit the metadata of a template by name.
@@ -15,7 +14,7 @@ coder templates edit [flags]
### --name
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Edit the template name.
@@ -23,7 +22,7 @@ Edit the template name.
### --display-name
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Edit the template display name.
@@ -31,7 +30,7 @@ Edit the template display name.
### --description
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Edit the template description.
@@ -39,7 +38,7 @@ Edit the template description.
### --deprecated
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Sets the template as deprecated. Must be a message explaining why the template is deprecated.
@@ -47,7 +46,7 @@ Sets the template as deprecated. Must be a message explaining why the template i
### --icon
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Edit the template icon path.
@@ -55,7 +54,7 @@ Edit the template icon path.
### --default-ttl
| | |
-| ---- | --------------------- |
+|------|-----------------------|
| Type | duration |
Edit the template default time before shutdown - workspaces created from this template default to this value. Maps to "Default autostop" in the UI.
@@ -63,7 +62,7 @@ Edit the template default time before shutdown - workspaces created from this te
### --activity-bump
| | |
-| ---- | --------------------- |
+|------|-----------------------|
| Type | duration |
Edit the template activity bump - workspaces created from this template will have their shutdown time bumped by this value when activity is detected. Maps to "Activity bump" in the UI.
@@ -71,7 +70,7 @@ Edit the template activity bump - workspaces created from this template will hav
### --autostart-requirement-weekdays
| | |
-| ---- | ---------------------------------------------------------------------------------- |
+|------|------------------------------------------------------------------------------------|
| Type | [monday\|tuesday\|wednesday\|thursday\|friday\|saturday\|sunday\|all] |
Edit the template autostart requirement weekdays - workspaces created from this template can only autostart on the given weekdays. To unset this value for the template (and allow autostart on all days), pass 'all'.
@@ -79,7 +78,7 @@ Edit the template autostart requirement weekdays - workspaces created from this
### --autostop-requirement-weekdays
| | |
-| ---- | ----------------------------------------------------------------------------------- |
+|------|-------------------------------------------------------------------------------------|
| Type | [monday\|tuesday\|wednesday\|thursday\|friday\|saturday\|sunday\|none] |
Edit the template autostop requirement weekdays - workspaces created from this template must be restarted on the given weekdays. To unset this value for the template (and disable the autostop requirement for the template), pass 'none'.
@@ -87,7 +86,7 @@ Edit the template autostop requirement weekdays - workspaces created from this t
### --autostop-requirement-weeks
| | |
-| ---- | ---------------- |
+|------|------------------|
| Type | int |
Edit the template autostop requirement weeks - workspaces created from this template must be restarted on an n-weekly basis.
@@ -95,7 +94,7 @@ Edit the template autostop requirement weeks - workspaces created from this temp
### --failure-ttl
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -104,7 +103,7 @@ Specify a failure TTL for workspaces created from this template. It is the amoun
### --dormancy-threshold
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -113,7 +112,7 @@ Specify a duration workspaces may be inactive prior to being moved to the dorman
### --dormancy-auto-deletion
| | |
-| ------- | --------------------- |
+|---------|-----------------------|
| Type | duration |
| Default | 0h |
@@ -122,7 +121,7 @@ Specify a duration workspaces may be in the dormant state prior to being deleted
### --allow-user-cancel-workspace-jobs
| | |
-| ------- | ----------------- |
+|---------|-------------------|
| Type | bool |
| Default | true |
@@ -131,7 +130,7 @@ Allow users to cancel in-progress workspace jobs.
### --allow-user-autostart
| | |
-| ------- | ----------------- |
+|---------|-------------------|
| Type | bool |
| Default | true |
@@ -140,7 +139,7 @@ Allow users to configure autostart for workspaces on this template. This can onl
### --allow-user-autostop
| | |
-| ------- | ----------------- |
+|---------|-------------------|
| Type | bool |
| Default | true |
@@ -149,7 +148,7 @@ Allow users to customize the autostop TTL for workspaces on this template. This
### --require-active-version
| | |
-| ------- | ------------------ |
+|---------|--------------------|
| Type | bool |
| Default | false |
@@ -158,7 +157,7 @@ Requires workspace builds to use the active template version. This setting does
### --private
| | |
-| ------- | ------------------ |
+|---------|--------------------|
| Type | bool |
| Default | false |
@@ -167,7 +166,7 @@ Disable the default behavior of granting template access to the 'everyone' group
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -175,7 +174,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_init.md b/docs/reference/cli/templates_init.md
index 1e5da9ffbff9c..30df7bb9c0ad3 100644
--- a/docs/reference/cli/templates_init.md
+++ b/docs/reference/cli/templates_init.md
@@ -1,5 +1,4 @@
-
# templates init
Get started with a templated template.
@@ -15,7 +14,7 @@ coder templates init [flags] [directory]
### --id
| | |
-| ---- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Type | aws-devcontainer\|aws-linux\|aws-windows\|azure-linux\|digitalocean-linux\|docker\|docker-devcontainer\|gcp-devcontainer\|gcp-linux\|gcp-vm-container\|gcp-windows\|kubernetes\|kubernetes-devcontainer\|nomad-docker\|scratch |
Specify a given example template by ID.
diff --git a/docs/reference/cli/templates_list.md b/docs/reference/cli/templates_list.md
index 81747abd50d5a..d5ec9d3cea8e5 100644
--- a/docs/reference/cli/templates_list.md
+++ b/docs/reference/cli/templates_list.md
@@ -1,12 +1,11 @@
-
# templates list
List all the templates available for the organization
Aliases:
-- ls
+* ls
## Usage
@@ -19,7 +18,7 @@ coder templates list [flags]
### -c, --column
| | |
-| ------- | --------------------------------------------------------------------------------------------------------------------------------------- |
+|---------|-----------------------------------------------------------------------------------------------------------------------------------------|
| Type | [name\|created at\|last updated\|organization id\|organization name\|provisioner\|active version id\|used by\|default ttl] |
| Default | name,organization name,last updated,used by |
@@ -28,7 +27,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/templates_pull.md b/docs/reference/cli/templates_pull.md
index 3678426fd098e..529b110248475 100644
--- a/docs/reference/cli/templates_pull.md
+++ b/docs/reference/cli/templates_pull.md
@@ -1,5 +1,4 @@
-
# templates pull
Download the active, latest, or specified version of a template to a path.
@@ -15,7 +14,7 @@ coder templates pull [flags] [destination]
### --tar
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output the template as a tar archive to stdout.
@@ -23,7 +22,7 @@ Output the template as a tar archive to stdout.
### --zip
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Output the template as a zip archive to stdout.
@@ -31,7 +30,7 @@ Output the template as a zip archive to stdout.
### --version
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
The name of the template version to pull. Use 'active' to pull the active version, 'latest' to pull the latest version, or the name of the template version to pull.
@@ -39,7 +38,7 @@ The name of the template version to pull. Use 'active' to pull the active versio
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -47,7 +46,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_push.md b/docs/reference/cli/templates_push.md
index e56528841ebda..46687d3fc672e 100644
--- a/docs/reference/cli/templates_push.md
+++ b/docs/reference/cli/templates_push.md
@@ -1,5 +1,4 @@
-
# templates push
Create or update a template from the current directory or as specified by flag
@@ -15,7 +14,7 @@ coder templates push [flags] [template]
### --variables-file
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specify a file path with values for Terraform-managed variables.
@@ -23,7 +22,7 @@ Specify a file path with values for Terraform-managed variables.
### --variable
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Specify a set of values for Terraform-managed variables.
@@ -31,7 +30,7 @@ Specify a set of values for Terraform-managed variables.
### --var
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Alias of --variable.
@@ -39,7 +38,7 @@ Alias of --variable.
### --provisioner-tag
| | |
-| ---- | ------------------------- |
+|------|---------------------------|
| Type | string-array |
Specify a set of tags to target provisioner daemons.
@@ -47,7 +46,7 @@ Specify a set of tags to target provisioner daemons.
### --name
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specify a name for the new template version. It will be automatically generated if not provided.
@@ -55,7 +54,7 @@ Specify a name for the new template version. It will be automatically generated
### --always-prompt
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Always prompt all parameters. Does not pull parameter values from active template version.
@@ -63,7 +62,7 @@ Always prompt all parameters. Does not pull parameter values from active templat
### --activate
| | |
-| ------- | ----------------- |
+|---------|-------------------|
| Type | bool |
| Default | true |
@@ -72,7 +71,7 @@ Whether the new template will be marked active.
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -80,7 +79,7 @@ Bypass prompts.
### -d, --directory
| | |
-| ------- | ------------------- |
+|---------|---------------------|
| Type | string |
| Default | . |
@@ -89,7 +88,7 @@ Specify the directory to create from, use '-' to read tar from stdin.
### --ignore-lockfile
| | |
-| ------- | ------------------ |
+|---------|--------------------|
| Type | bool |
| Default | false |
@@ -98,7 +97,7 @@ Ignore warnings about not having a .terraform.lock.hcl file present in the templ
### -m, --message
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specify a message describing the changes in this version of the template. Messages longer than 72 characters will be displayed as truncated.
@@ -106,7 +105,7 @@ Specify a message describing the changes in this version of the template. Messag
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_versions.md b/docs/reference/cli/templates_versions.md
index 5b1c3b2c2cfb8..8eb927967d162 100644
--- a/docs/reference/cli/templates_versions.md
+++ b/docs/reference/cli/templates_versions.md
@@ -1,12 +1,11 @@
-
# templates versions
Manage different versions of the specified template
Aliases:
-- version
+* version
## Usage
@@ -25,7 +24,7 @@ coder templates versions
## Subcommands
| Name | Purpose |
-| ----------------------------------------------------------- | ----------------------------------------------- |
+|-------------------------------------------------------------|-------------------------------------------------|
| [list](./templates_versions_list.md) | List all the versions of the specified template |
| [archive](./templates_versions_archive.md) | Archive a template version(s). |
| [unarchive](./templates_versions_unarchive.md) | Unarchive a template version(s). |
diff --git a/docs/reference/cli/templates_versions_archive.md b/docs/reference/cli/templates_versions_archive.md
index d6053db9ca185..1c7f4fd7d82c5 100644
--- a/docs/reference/cli/templates_versions_archive.md
+++ b/docs/reference/cli/templates_versions_archive.md
@@ -1,5 +1,4 @@
-
# templates versions archive
Archive a template version(s).
@@ -7,7 +6,7 @@ Archive a template version(s).
## Usage
```console
-coder templates versions archive [flags] [template-version-names...]
+coder templates versions archive [flags] [template-version-names...]
```
## Options
@@ -15,7 +14,7 @@ coder templates versions archive [flags] [template-version-names
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -23,7 +22,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_versions_list.md b/docs/reference/cli/templates_versions_list.md
index b19e18abe31e4..0c738f156916f 100644
--- a/docs/reference/cli/templates_versions_list.md
+++ b/docs/reference/cli/templates_versions_list.md
@@ -1,5 +1,4 @@
-
# templates versions list
List all the versions of the specified template
@@ -15,7 +14,7 @@ coder templates versions list [flags]
### --include-archived
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Include archived versions in the result list.
@@ -23,7 +22,7 @@ Include archived versions in the result list.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
@@ -32,7 +31,7 @@ Select which organization (uuid or name) to use.
### -c, --column
| | |
-| ------- | --------------------------------------------------------------------- |
+|---------|-----------------------------------------------------------------------|
| Type | [name\|created at\|created by\|status\|active\|archived] |
| Default | name,created at,created by,status,active |
@@ -41,7 +40,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/templates_versions_promote.md b/docs/reference/cli/templates_versions_promote.md
index 30b5f1e8776c6..ecf3ab661cd22 100644
--- a/docs/reference/cli/templates_versions_promote.md
+++ b/docs/reference/cli/templates_versions_promote.md
@@ -1,5 +1,4 @@
-
# templates versions promote
Promote a template version to active.
@@ -21,7 +20,7 @@ Promote an existing template version to be the active version for the specified
### -t, --template
| | |
-| ----------- | --------------------------------- |
+|-------------|-----------------------------------|
| Type | string |
| Environment | $CODER_TEMPLATE_NAME |
@@ -30,7 +29,7 @@ Specify the template name.
### --template-version
| | |
-| ----------- | ----------------------------------------- |
+|-------------|-------------------------------------------|
| Type | string |
| Environment | $CODER_TEMPLATE_VERSION_NAME |
@@ -39,7 +38,7 @@ Specify the template version name to promote.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/templates_versions_unarchive.md b/docs/reference/cli/templates_versions_unarchive.md
index 7b8d15b4ea21c..c5351939bcf39 100644
--- a/docs/reference/cli/templates_versions_unarchive.md
+++ b/docs/reference/cli/templates_versions_unarchive.md
@@ -1,5 +1,4 @@
-
# templates versions unarchive
Unarchive a template version(s).
@@ -7,7 +6,7 @@ Unarchive a template version(s).
## Usage
```console
-coder templates versions unarchive [flags] [template-version-names...]
+coder templates versions unarchive [flags] [template-version-names...]
```
## Options
@@ -15,7 +14,7 @@ coder templates versions unarchive [flags] [template-version-nam
### -y, --yes
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Bypass prompts.
@@ -23,7 +22,7 @@ Bypass prompts.
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/tokens.md b/docs/reference/cli/tokens.md
index 4e74eb9516057..36b6575ed323f 100644
--- a/docs/reference/cli/tokens.md
+++ b/docs/reference/cli/tokens.md
@@ -1,12 +1,11 @@
-
# tokens
Manage personal access tokens
Aliases:
-- token
+* token
## Usage
@@ -34,7 +33,7 @@ Tokens are used to authenticate automated clients to Coder.
## Subcommands
| Name | Purpose |
-| ----------------------------------------- | -------------- |
+|-------------------------------------------|----------------|
| [create](./tokens_create.md) | Create a token |
| [list](./tokens_list.md) | List tokens |
| [remove](./tokens_remove.md) | Delete a token |
diff --git a/docs/reference/cli/tokens_create.md b/docs/reference/cli/tokens_create.md
index bae168c25e50b..7ad9699c17c35 100644
--- a/docs/reference/cli/tokens_create.md
+++ b/docs/reference/cli/tokens_create.md
@@ -1,5 +1,4 @@
-
# tokens create
Create a token
@@ -15,7 +14,7 @@ coder tokens create [flags]
### --lifetime
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string |
| Environment | $CODER_TOKEN_LIFETIME |
@@ -24,7 +23,7 @@ Specify a duration for the lifetime of the token.
### -n, --name
| | |
-| ----------- | ------------------------------ |
+|-------------|--------------------------------|
| Type | string |
| Environment | $CODER_TOKEN_NAME |
@@ -33,7 +32,7 @@ Specify a human-readable name.
### -u, --user
| | |
-| ----------- | ------------------------------ |
+|-------------|--------------------------------|
| Type | string |
| Environment | $CODER_TOKEN_USER |
diff --git a/docs/reference/cli/tokens_list.md b/docs/reference/cli/tokens_list.md
index 5b0cdab5f070f..150b411855174 100644
--- a/docs/reference/cli/tokens_list.md
+++ b/docs/reference/cli/tokens_list.md
@@ -1,12 +1,11 @@
-
# tokens list
List tokens
Aliases:
-- ls
+* ls
## Usage
@@ -19,7 +18,7 @@ coder tokens list [flags]
### -a, --all
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Specifies whether all users' tokens will be listed or not (must have Owner role to see all tokens).
@@ -27,7 +26,7 @@ Specifies whether all users' tokens will be listed or not (must have Owner role
### -c, --column
| | |
-| ------- | ----------------------------------------------------------------- |
+|---------|-------------------------------------------------------------------|
| Type | [id\|name\|last used\|expires at\|created at\|owner] |
| Default | id,name,last used,expires at,created at |
@@ -36,7 +35,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/tokens_remove.md b/docs/reference/cli/tokens_remove.md
index 408f233494d1b..8825040f5e3a7 100644
--- a/docs/reference/cli/tokens_remove.md
+++ b/docs/reference/cli/tokens_remove.md
@@ -1,13 +1,12 @@
-
# tokens remove
Delete a token
Aliases:
-- delete
-- rm
+* delete
+* rm
## Usage
diff --git a/docs/reference/cli/unfavorite.md b/docs/reference/cli/unfavorite.md
index b1dca7a397bc7..2bf15b437e7b9 100644
--- a/docs/reference/cli/unfavorite.md
+++ b/docs/reference/cli/unfavorite.md
@@ -1,13 +1,12 @@
-
# unfavorite
Remove a workspace from your favorites
Aliases:
-- unfav
-- unfavourite
+* unfav
+* unfavourite
## Usage
diff --git a/docs/reference/cli/update.md b/docs/reference/cli/update.md
index 920fee20058b1..dd2bfa5ff76b5 100644
--- a/docs/reference/cli/update.md
+++ b/docs/reference/cli/update.md
@@ -1,5 +1,4 @@
-
# update
Will update and start a given workspace if it is out of date
@@ -21,7 +20,7 @@ Use --always-prompt to change the parameter values of the workspace.
### --build-option
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string-array |
| Environment | $CODER_BUILD_OPTION |
@@ -30,7 +29,7 @@ Build option value in the format "name=value".
### --build-options
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Prompt for one-time build options defined with ephemeral parameters.
@@ -38,7 +37,7 @@ Prompt for one-time build options defined with ephemeral parameters.
### --ephemeral-parameter
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string-array |
| Environment | $CODER_EPHEMERAL_PARAMETER |
@@ -47,7 +46,7 @@ Set the value of ephemeral parameters defined in the template. The format is "na
### --prompt-ephemeral-parameters
| | |
-| ----------- | ----------------------------------------------- |
+|-------------|-------------------------------------------------|
| Type | bool |
| Environment | $CODER_PROMPT_EPHEMERAL_PARAMETERS |
@@ -56,7 +55,7 @@ Prompt to set values of ephemeral parameters defined in the template. If a value
### --parameter
| | |
-| ----------- | ---------------------------------- |
+|-------------|------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER |
@@ -65,7 +64,7 @@ Rich parameter value in the format "name=value".
### --rich-parameter-file
| | |
-| ----------- | --------------------------------------- |
+|-------------|-----------------------------------------|
| Type | string |
| Environment | $CODER_RICH_PARAMETER_FILE |
@@ -74,7 +73,7 @@ Specify a file path with values for rich parameters defined in the template. The
### --parameter-default
| | |
-| ----------- | ------------------------------------------ |
+|-------------|--------------------------------------------|
| Type | string-array |
| Environment | $CODER_RICH_PARAMETER_DEFAULT |
@@ -83,7 +82,7 @@ Rich parameter default values in the format "name=value".
### --always-prompt
| | |
-| ---- | ----------------- |
+|------|-------------------|
| Type | bool |
Always prompt all parameters. Does not pull parameter values from existing workspace.
diff --git a/docs/reference/cli/users.md b/docs/reference/cli/users.md
index a45f6b374dad2..174e08fe9f3a0 100644
--- a/docs/reference/cli/users.md
+++ b/docs/reference/cli/users.md
@@ -1,12 +1,11 @@
-
# users
Manage users
Aliases:
-- user
+* user
## Usage
@@ -17,7 +16,7 @@ coder users [subcommand]
## Subcommands
| Name | Purpose |
-| -------------------------------------------- | ------------------------------------------------------------------------------------- |
+|----------------------------------------------|---------------------------------------------------------------------------------------|
| [create](./users_create.md) | |
| [list](./users_list.md) | |
| [show](./users_show.md) | Show a single user. Use 'me' to indicate the currently authenticated user. |
diff --git a/docs/reference/cli/users_activate.md b/docs/reference/cli/users_activate.md
index fe8022616a9e8..e82313c0c817d 100644
--- a/docs/reference/cli/users_activate.md
+++ b/docs/reference/cli/users_activate.md
@@ -1,12 +1,11 @@
-
# users activate
Update a user's status to 'active'. Active users can fully interact with the platform
Aliases:
-- active
+* active
## Usage
@@ -17,7 +16,7 @@ coder users activate [flags]
## Description
```console
- $ coder users activate example_user
+ coder users activate example_user
```
## Options
@@ -25,7 +24,7 @@ coder users activate [flags]
### -c, --column
| | |
-| ------- | -------------------------------------------------- |
+|---------|----------------------------------------------------|
| Type | [username\|email\|created at\|status] |
| Default | username,email,created at,status |
diff --git a/docs/reference/cli/users_create.md b/docs/reference/cli/users_create.md
index 368f049e0a91d..61768ebfdbbf8 100644
--- a/docs/reference/cli/users_create.md
+++ b/docs/reference/cli/users_create.md
@@ -1,5 +1,4 @@
-
# users create
## Usage
@@ -13,7 +12,7 @@ coder users create [flags]
### -e, --email
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies an email address for the new user.
@@ -21,7 +20,7 @@ Specifies an email address for the new user.
### -u, --username
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies a username for the new user.
@@ -29,7 +28,7 @@ Specifies a username for the new user.
### -n, --full-name
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies an optional human-readable name for the new user.
@@ -37,7 +36,7 @@ Specifies an optional human-readable name for the new user.
### -p, --password
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Specifies a password for the new user.
@@ -45,7 +44,7 @@ Specifies a password for the new user.
### --login-type
| | |
-| ---- | ------------------- |
+|------|---------------------|
| Type | string |
Optionally specify the login type for the user. Valid values are: password, none, github, oidc. Using 'none' prevents the user from authenticating and requires an API key/token to be generated by an admin.
@@ -53,7 +52,7 @@ Optionally specify the login type for the user. Valid values are: password, none
### -O, --org
| | |
-| ----------- | -------------------------------- |
+|-------------|----------------------------------|
| Type | string |
| Environment | $CODER_ORGANIZATION |
diff --git a/docs/reference/cli/users_delete.md b/docs/reference/cli/users_delete.md
index d4da1c8b5db7a..7bfe7db59c90a 100644
--- a/docs/reference/cli/users_delete.md
+++ b/docs/reference/cli/users_delete.md
@@ -1,12 +1,11 @@
-
# users delete
Delete a user by username or user_id.
Aliases:
-- rm
+* rm
## Usage
diff --git a/docs/reference/cli/users_list.md b/docs/reference/cli/users_list.md
index 4405179b61697..42adf1df8e2c1 100644
--- a/docs/reference/cli/users_list.md
+++ b/docs/reference/cli/users_list.md
@@ -1,10 +1,9 @@
-
# users list
Aliases:
-- ls
+* ls
## Usage
@@ -17,7 +16,7 @@ coder users list [flags]
### -c, --column
| | |
-| ------- | ------------------------------------------------------------------ |
+|---------|--------------------------------------------------------------------|
| Type | [id\|username\|email\|created at\|updated at\|status] |
| Default | username,email,created at,status |
@@ -26,7 +25,7 @@ Columns to display in table output.
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/users_show.md b/docs/reference/cli/users_show.md
index 44887a7339518..de53d673849bf 100644
--- a/docs/reference/cli/users_show.md
+++ b/docs/reference/cli/users_show.md
@@ -1,5 +1,4 @@
-
# users show
Show a single user. Use 'me' to indicate the currently authenticated user.
@@ -13,7 +12,7 @@ coder users show [flags]
## Description
```console
- $ coder users show me
+ coder users show me
```
## Options
@@ -21,7 +20,7 @@ coder users show [flags]
### -o, --output
| | |
-| ------- | ------------------------ |
+|---------|--------------------------|
| Type | table\|json |
| Default | table |
diff --git a/docs/reference/cli/users_suspend.md b/docs/reference/cli/users_suspend.md
index a4bbc8c7b6f82..286a73cd2432c 100644
--- a/docs/reference/cli/users_suspend.md
+++ b/docs/reference/cli/users_suspend.md
@@ -1,5 +1,4 @@
-
# users suspend
Update a user's status to 'suspended'. A suspended user cannot log into the platform
@@ -13,7 +12,7 @@ coder users suspend [flags]
## Description
```console
- $ coder users suspend example_user
+ coder users suspend example_user
```
## Options
@@ -21,7 +20,7 @@ coder users suspend [flags]
### -c, --column
| | |
-| ------- | -------------------------------------------------- |
+|---------|----------------------------------------------------|
| Type | [username\|email\|created at\|status] |
| Default | username,email,created at,status |
diff --git a/docs/reference/cli/version.md b/docs/reference/cli/version.md
index a88652fed6489..cb0573c597bc9 100644
--- a/docs/reference/cli/version.md
+++ b/docs/reference/cli/version.md
@@ -1,5 +1,4 @@
-
# version
Show coder version
@@ -15,7 +14,7 @@ coder version [flags]
### -o, --output
| | |
-| ------- | ----------------------- |
+|---------|-------------------------|
| Type | text\|json |
| Default | text |
diff --git a/docs/reference/cli/whoami.md b/docs/reference/cli/whoami.md
index 7e2736d454bf4..f3038789f25ac 100644
--- a/docs/reference/cli/whoami.md
+++ b/docs/reference/cli/whoami.md
@@ -1,5 +1,4 @@
-
# whoami
Fetch authenticated user info for Coder deployment
diff --git a/docs/reference/index.md b/docs/reference/index.md
index 4ef592d5e0840..4de97ef4a3099 100644
--- a/docs/reference/index.md
+++ b/docs/reference/index.md
@@ -1,6 +1,6 @@
# Reference
-# Automation
+## Automation
All actions possible through the Coder dashboard can also be automated. There
are several ways to extend/automate Coder:
@@ -74,9 +74,9 @@ activity.
-d "{
\"logs\": [
{
- \"created_at\": \"$(date -u +'%Y-%m-%dT%H:%M:%SZ')\",
- \"level\": \"info\",
- \"output\": \"Restoring workspace from snapshot: 05%...\"
+ \"created_at\": \"$(date -u +'%Y-%m-%dT%H:%M:%SZ')\",
+ \"level\": \"info\",
+ \"output\": \"Restoring workspace from snapshot: 05%...\"
}
]
}"
@@ -97,7 +97,7 @@ activity.
curl -X PUT "https://coder.example.com/api/v2/workspaces/$WORKSPACE_ID/extend" \
-H "Coder-Session-Token: $CODER_AGENT_TOKEN" \
-d '{
- "deadline": "2019-08-24T14:15:22Z"
+ "deadline": "2019-08-24T14:15:22Z"
}'
# Sleep for 30 minutes (1800 seconds) if the job is running
diff --git a/docs/start/local-deploy.md b/docs/start/local-deploy.md
index 5a25a525bcec1..d3944caddf051 100644
--- a/docs/start/local-deploy.md
+++ b/docs/start/local-deploy.md
@@ -1,4 +1,4 @@
-## Setting up a Coder deployment
+# Setting up a Coder deployment
For day-zero Coder users, we recommend following this guide to set up a local
Coder deployment from our
@@ -11,7 +11,7 @@ slim deployment to experiment with [workspaces](../user-guides/index.md) and
Docker is not necessary for every Coder deployment and is only used here for
simplicity.
-### Install Coder daemon
+## Install Coder daemon
First, install [Docker](https://docs.docker.com/engine/install/) locally.
@@ -45,7 +45,7 @@ winget install Coder.Coder
-### Start the server
+## Start the server
To start or restart the Coder deployment, use the following command:
@@ -61,6 +61,6 @@ administrator account.
Once you've signed in, you'll be brought to an empty workspaces page, which
we'll soon populate with your first development environments.
-### Next steps
+## Next steps
TODO: Add link to next page.
diff --git a/docs/start/screenshots.md b/docs/start/screenshots.md
index 608e92e42ee5e..ddf71b823f7fc 100644
--- a/docs/start/screenshots.md
+++ b/docs/start/screenshots.md
@@ -46,7 +46,7 @@ Templates are most popular and how well they perform for developers.
![Coder administrators can control *every* aspect of their Coder deployment.](../images/screenshots/settings.png)
-Coder administrators can control _every_ aspect of their Coder deployment.
+Coder administrators can control *every* aspect of their Coder deployment.
![Coder administrators and auditor roles can review how users are interacting with their Coder Workspaces and Templates.](../images/screenshots/audit.png)
diff --git a/docs/tutorials/azure-federation.md b/docs/tutorials/azure-federation.md
index 325991e9f4359..18726af617bd8 100644
--- a/docs/tutorials/azure-federation.md
+++ b/docs/tutorials/azure-federation.md
@@ -3,7 +3,7 @@
January 26, 2024
diff --git a/docs/tutorials/best-practices/organizations.md b/docs/tutorials/best-practices/organizations.md
index 465a9301ec52f..473bf832e11d8 100644
--- a/docs/tutorials/best-practices/organizations.md
+++ b/docs/tutorials/best-practices/organizations.md
@@ -47,8 +47,8 @@ deployed with Coder and has 1000 users in production. Today, MegaCo has a single
(default) organization and a central platform team but is evaluating whether to
use organizations for several use cases.
-| **Use Case** | **Description** | **Good fit for organizations?** |
-| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- |
+| **Use Case** | **Description** | **Good fit for organizations?** |
+|--------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------|
| Mergers and acquisitions | Raptix, a 180-person startup recently acquired by MegaCo, has an independent cloud account, platform team, and Terraform modules and pipelines for deploying their code. They want to use Coder. | ✅ Organizations |
| Independent cloud-native teams that manage their namespaces, images, and/or clusters | MegaCo has six teams responsible for their own dev, staging, and production Kubernetes clusters and frequently deploy & test their work with `kubectl` and `helm`. They wish to hook up Coder to their cluster so they can write and manage IDE templates for connecting to the cluster with their IDE | ✅ Organizations |
| Java monolith | MegaCo has identified that anyone developing the Java monolith is best served with a VM instead of a container/cloud-native environment. However, the Java team is supported by MegaCo's central platform team. | ❌ Use instead: A separate template and/or groups |
@@ -115,18 +115,18 @@ within an organization. Custom roles can be applied to the default organization.
Some examples of custom roles that can be created:
-**Provisioner Admin**
+### Provisioner Admin
- The user can deploy provisioners but not manage templates. This may be useful
if automation is used to create and update templates in the organization.
-**Template Editor**
+### Template Editor
- Inverse of provisioner admin: User can manage templates but not deploy
provisioners. This may be useful if the provisioner and template are deployed
via automation and users are allowed to edit them.
-**Template Pusher**
+### Template Pusher
- A system account that can push new templates from a git repo but cannot manage
users or delete templates.
diff --git a/docs/tutorials/cloning-git-repositories.md b/docs/tutorials/cloning-git-repositories.md
index 3d3be2d37d659..30d93f4537238 100644
--- a/docs/tutorials/cloning-git-repositories.md
+++ b/docs/tutorials/cloning-git-repositories.md
@@ -4,7 +4,7 @@
Author: Bruno Quaresma
-
+
August 06, 2024
diff --git a/docs/tutorials/configuring-okta.md b/docs/tutorials/configuring-okta.md
index d52c99a5a7974..b5e936e922a39 100644
--- a/docs/tutorials/configuring-okta.md
+++ b/docs/tutorials/configuring-okta.md
@@ -4,7 +4,7 @@
Author: Steven Masley
-
+
December 13, 2023
@@ -22,7 +22,7 @@ roles.
You may use a hybrid of the following approaches.
-# (Easiest) Sync using Okta Groups
+## (Easiest) Sync using Okta Groups
If the Coder roles & Coder groups can be inferred from
[Okta groups](https://help.okta.com/en-us/content/topics/users-groups-profiles/usgp-about-groups.htm),
@@ -67,7 +67,7 @@ CODER_OIDC_USER_ROLE_FIELD=groups
CODER_OIDC_USER_ROLE_MAPPING='{"admin-group":["template-admin","user-admin"]}'
```
-# (Easy) Mapping Okta profile attributes
+## (Easy) Mapping Okta profile attributes
If roles or groups cannot be completely inferred from Okta group memberships,
another option is to source them from a user’s attributes. The user attribute
@@ -105,7 +105,7 @@ CODER_OIDC_USER_ROLE_MAPPING='{"admin-group":["template-admin","user-admin"]}'
# CODER_OIDC_GROUP_FIELD=...
```
-# (Advanced) Custom scopes to retrieve custom claims
+## (Advanced) Custom scopes to retrieve custom claims
Okta does not support setting custom scopes and claims in the default
authorization server used by your application. If you require this
diff --git a/docs/tutorials/example-guide.md b/docs/tutorials/example-guide.md
index f60ce6972710b..f287c265efc2f 100644
--- a/docs/tutorials/example-guide.md
+++ b/docs/tutorials/example-guide.md
@@ -3,7 +3,7 @@
December 13, 2023
diff --git a/docs/tutorials/external-database.md b/docs/tutorials/external-database.md
index a04969525334b..a115192a47d63 100644
--- a/docs/tutorials/external-database.md
+++ b/docs/tutorials/external-database.md
@@ -44,9 +44,9 @@ CREATE SCHEMA myschema;
Once the schema is created, you can list all schemas with `\dn`:
-```
- List of schemas
- Name | Owner
+```text
+List of schemas
+ Name | Owner
-----------+----------
myschema | coder
public | postgres
diff --git a/docs/tutorials/faqs.md b/docs/tutorials/faqs.md
index 8f27b92ebf92c..46f3856ee75ca 100644
--- a/docs/tutorials/faqs.md
+++ b/docs/tutorials/faqs.md
@@ -8,9 +8,9 @@ For other community resources, see our
[GitHub discussions](https://github.com/coder/coder/discussions), or join our
[Discord server](https://discord.gg/coder).
-### How do I add a Premium trial license?
+## How do I add a Premium trial license?
-Visit https://coder.com/trial or contact
+Visit <https://coder.com/trial> or contact
[sales@coder.com](mailto:sales@coder.com?subject=License) to get a trial key.
@@ -34,7 +34,7 @@ If the license is in a file:
coder licenses add -f
```
-### I'm experiencing networking issues, so want to disable Tailscale, STUN, Direct connections and force use of websocket
+## I'm experiencing networking issues, so want to disable Tailscale, STUN, Direct connections and force use of websocket
The primary developer use case is a local IDE connecting over SSH to a Coder
workspace.
@@ -56,18 +56,18 @@ Setting the following flags as shown disables this logic to simplify
troubleshooting.
| Flag | Value | Meaning |
-| --------------------------------------------------------------------------------------------- | ----------- | ------------------------------------- |
+|-----------------------------------------------------------------------------------------------|-------------|---------------------------------------|
| [`CODER_BLOCK_DIRECT`](../reference/cli/server.md#--block-direct-connections) | `true` | Blocks direct connections |
| [`CODER_DERP_SERVER_STUN_ADDRESSES`](../reference/cli/server.md#--derp-server-stun-addresses) | `"disable"` | Disables STUN |
| [`CODER_DERP_FORCE_WEBSOCKETS`](../reference/cli/server.md#--derp-force-websockets) | `true` | Forces websockets over Tailscale DERP |
-### How do I configure NGINX as the reverse proxy in front of Coder?
+## How do I configure NGINX as the reverse proxy in front of Coder?
[This tutorial](./reverse-proxy-nginx.md) in our docs explains in detail how to
configure NGINX with Coder so that our Tailscale Wireguard networking functions
properly.
-### How do I hide some of the default icons in a workspace like VS Code Desktop, Terminal, SSH, Ports?
+## How do I hide some of the default icons in a workspace like VS Code Desktop, Terminal, SSH, Ports?
The visibility of Coder apps is configurable in the template. To change the
default (shows all), add this block inside the
@@ -88,7 +88,7 @@ This example will hide all built-in
[`coder_app`](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/app)
icons except the web terminal.
-### I want to allow code-server to be accessible by other users in my deployment.
+## I want to allow code-server to be accessible by other users in my deployment
> It is **not** recommended to share a web IDE, but if required, the following
> deployment environment variable settings are required.
@@ -118,7 +118,7 @@ resource "coder_app" "code-server" {
}
```
-### I installed Coder and created a workspace but the icons do not load.
+## I installed Coder and created a workspace but the icons do not load
An important concept to understand is that Coder creates workspaces which have
an agent that must be able to reach the `coder server`.
@@ -139,9 +139,9 @@ coder server --access-url http://localhost:3000 --address 0.0.0.0:3000
```
> Even `coder server` which creates a reverse proxy, will let you use
-> http://localhost to access Coder from a browser.
+> <http://localhost> to access Coder from a browser.
-### I updated a template, and an existing workspace based on that template fails to start.
+## I updated a template, and an existing workspace based on that template fails to start
When updating a template, be aware of potential issues with input variables. For
example, if a template prompts users to choose options like a
@@ -161,7 +161,7 @@ workspace from a failed status.
coder update --always-prompt
```
-### I'm running coder on a VM with systemd but latest release installed isn't showing up.
+## I'm running coder on a VM with systemd but latest release installed isn't showing up
Take, for example, a Coder deployment on a VM with a 2 shared vCPU systemd
service. In this scenario, it's necessary to reload the daemon and then restart
@@ -176,7 +176,7 @@ sudo systemctl daemon-reload
sudo systemctl restart coder.service
```
-### I'm using the built-in Postgres database and forgot admin email I set up.
+## I'm using the built-in Postgres database and forgot admin email I set up
1. Run the `coder server` command below to retrieve the `psql` connection URL
which includes the database user and password.
@@ -189,7 +189,7 @@ coder server postgres-builtin-url
psql "postgres://coder@localhost:53737/coder?sslmode=disable&password=I2S...pTk"
```
-### How to find out Coder's latest Terraform provider version?
+## How to find out Coder's latest Terraform provider version?
[Coder is on the HashiCorp's Terraform registry](https://registry.terraform.io/providers/coder/coder/latest).
Check this frequently to make sure you are on the latest version.
@@ -198,7 +198,7 @@ Sometimes, the version may change and `resource` configurations will either
become deprecated or new ones will be added when you get warnings or errors
creating and pushing templates.
-### How can I set up TLS for my deployment and not create a signed certificate?
+## How can I set up TLS for my deployment and not create a signed certificate?
Caddy is an easy-to-configure reverse proxy that also automatically creates
certificates from Let's Encrypt.
@@ -223,7 +223,7 @@ coder.example.com {
}
```
-### I'm using Caddy as my reverse proxy in front of Coder. How do I set up a wildcard domain for port forwarding?
+## I'm using Caddy as my reverse proxy in front of Coder. How do I set up a wildcard domain for port forwarding?
Caddy requires your DNS provider's credentials to create wildcard certificates.
This involves building the Caddy binary
@@ -253,7 +253,7 @@ The updated Caddyfile configuration will look like this:
}
```
-### Can I use local or remote Terraform Modules in Coder templates?
+## Can I use local or remote Terraform Modules in Coder templates?
One way is to reference a Terraform module from a GitHub repo to avoid
duplication and then just extend it or pass template-specific
@@ -296,7 +296,7 @@ References:
- [Public GitHub Issue 5677](https://github.com/coder/coder/issues/5677)
- [Coder docs: Templates/Change Management](../admin/templates/managing-templates/change-management.md)
-### Can I run Coder in an air-gapped or offline mode? (no Internet)?
+## Can I run Coder in an air-gapped or offline mode? (no Internet)?
Yes, Coder can be deployed in
[air-gapped or offline mode](../install/offline.md).
@@ -310,7 +310,7 @@ defaults to Google's STUN servers, so you can either create your STUN server in
your network or disable and force all traffic through the control plane's DERP
proxy.
-### Create a randomized computer_name for an Azure VM
+## Create a randomized computer_name for an Azure VM
Azure VMs have a 15 character limit for the `computer_name` which can lead to
duplicate name errors.
@@ -325,7 +325,7 @@ locals {
}
```
-### Do you have example JetBrains Gateway templates?
+## Do you have example JetBrains Gateway templates?
In August 2023, JetBrains certified the Coder plugin signifying enhanced
stability and reliability.
@@ -347,7 +347,7 @@ open the IDE.
- [IntelliJ IDEA](https://github.com/sharkymark/v2-templates/tree/main/src/pod-idea)
- [IntelliJ IDEA with Icon](https://github.com/sharkymark/v2-templates/tree/main/src/pod-idea-icon)
-### What options do I have for adding VS Code extensions into code-server, VS Code Desktop or Microsoft's Code Server?
+## What options do I have for adding VS Code extensions into code-server, VS Code Desktop or Microsoft's Code Server?
Coder has an open-source project called
[`code-marketplace`](https://github.com/coder/code-marketplace) which is a
@@ -371,7 +371,7 @@ Microsoft's marketplace.
> Note: these are example templates with no SLAs on them and are not guaranteed
> for long-term support.
-### I want to run Docker for my workspaces but not install Docker Desktop.
+## I want to run Docker for my workspaces but not install Docker Desktop
[Colima](https://github.com/abiosoft/colima) is a Docker Desktop alternative.
@@ -406,7 +406,7 @@ Colima will show the path to the docker socket so we have a
[community template](https://github.com/sharkymark/v2-templates/tree/main/src/docker-code-server)
that prompts the Coder admin to enter the docker socket as a Terraform variable.
-### How to make a `coder_app` optional?
+## How to make a `coder_app` optional?
An example use case is the user should decide if they want a browser-based IDE
like code-server when creating the workspace.
@@ -414,57 +414,57 @@ like code-server when creating the workspace.
1. Add a `coder_parameter` with type `bool` to ask the user if they want the
code-server IDE
-```tf
-data "coder_parameter" "code_server" {
- name = "Do you want code-server in your workspace?"
- description = "Use VS Code in a browser."
- type = "bool"
- default = false
- mutable = true
- icon = "/icon/code.svg"
- order = 6
-}
-```
+ ```tf
+ data "coder_parameter" "code_server" {
+ name = "Do you want code-server in your workspace?"
+ description = "Use VS Code in a browser."
+ type = "bool"
+ default = false
+ mutable = true
+ icon = "/icon/code.svg"
+ order = 6
+ }
+ ```
2. Add conditional logic to the `startup_script` to install and start
code-server depending on the value of the added `coder_parameter`
-```sh
-# install and start code-server, VS Code in a browser
+ ```sh
+ # install and start code-server, VS Code in a browser
-if [ ${data.coder_parameter.code_server.value} = true ]; then
- echo "🧑🏼💻 Downloading and installing the latest code-server IDE..."
- curl -fsSL https://code-server.dev/install.sh | sh
- code-server --auth none --port 13337 >/dev/null 2>&1 &
-fi
-```
+ if [ ${data.coder_parameter.code_server.value} = true ]; then
+ echo "🧑🏼💻 Downloading and installing the latest code-server IDE..."
+ curl -fsSL https://code-server.dev/install.sh | sh
+ code-server --auth none --port 13337 >/dev/null 2>&1 &
+ fi
+ ```
3. Add a Terraform meta-argument
[`count`](https://developer.hashicorp.com/terraform/language/meta-arguments/count)
in the `coder_app` resource so it will only create the resource if the
`coder_parameter` is `true`
-```tf
-# code-server
-resource "coder_app" "code-server" {
- count = data.coder_parameter.code_server.value ? 1 : 0
- agent_id = coder_agent.coder.id
- slug = "code-server"
- display_name = "code-server"
- icon = "/icon/code.svg"
- url = "http://localhost:13337?folder=/home/coder"
- subdomain = false
- share = "owner"
-
- healthcheck {
- url = "http://localhost:13337/healthz"
- interval = 3
- threshold = 10
- }
-}
-```
+ ```tf
+ # code-server
+ resource "coder_app" "code-server" {
+ count = data.coder_parameter.code_server.value ? 1 : 0
+ agent_id = coder_agent.coder.id
+ slug = "code-server"
+ display_name = "code-server"
+ icon = "/icon/code.svg"
+ url = "http://localhost:13337?folder=/home/coder"
+ subdomain = false
+ share = "owner"
+
+ healthcheck {
+ url = "http://localhost:13337/healthz"
+ interval = 3
+ threshold = 10
+ }
+ }
+ ```
-### Why am I getting this "remote host doesn't meet VS Code Server's prerequisites" error when opening up VSCode remote in a Linux environment?
+## Why am I getting this "remote host doesn't meet VS Code Server's prerequisites" error when opening up VSCode remote in a Linux environment?
![VS Code Server prerequisite](https://github.com/coder/coder/assets/10648092/150c5996-18b1-4fae-afd0-be2b386a3239)
@@ -473,9 +473,9 @@ or VM/VPS doesn't have the proper C libraries to run the VS Code Server. For
instance, Alpine is not supported at all. If so, you need to find a container
image or supported OS for the VS Code Server. For more information on OS
prerequisites for Linux, please look at the VSCode docs.
-https://code.visualstudio.com/docs/remote/linux#_local-linux-prerequisites
+<https://code.visualstudio.com/docs/remote/linux#_local-linux-prerequisites>
-### How can I resolve disconnects when connected to Coder via JetBrains Gateway?
+## How can I resolve disconnects when connected to Coder via JetBrains Gateway?
If your JetBrains IDE is disconnected for a long period of time due to a network
change (for example turning off a VPN), you may find that the IDE will not
@@ -504,7 +504,7 @@ config file will be overwritten by the JetBrains Gateway client when it
re-authenticates to your Coder deployment so you must add the above config as a
separate block and not add it to any existing ones.
-### How can I restrict inbound/outbound file transfers from Coder workspaces?
+## How can I restrict inbound/outbound file transfers from Coder workspaces?
In certain environments, it is essential to keep confidential files within
workspaces and prevent users from uploading or downloading resources using tools
@@ -526,7 +526,7 @@ resource "docker_container" "workspace" {
}
```
-#### Important Notice
+### Important Notice
This control operates at the `ssh-exec` level or during `sftp` sessions. While
it can help prevent automated file transfers using the specified tools, users
diff --git a/docs/tutorials/gcp-to-aws.md b/docs/tutorials/gcp-to-aws.md
index 4c4821fbb2d14..85e8737bedbbc 100644
--- a/docs/tutorials/gcp-to-aws.md
+++ b/docs/tutorials/gcp-to-aws.md
@@ -3,7 +3,7 @@
February 24, 2024
diff --git a/docs/tutorials/reverse-proxy-caddy.md b/docs/tutorials/reverse-proxy-caddy.md
index 7a7583491c5cc..5f14745f4868c 100644
--- a/docs/tutorials/reverse-proxy-caddy.md
+++ b/docs/tutorials/reverse-proxy-caddy.md
@@ -16,82 +16,82 @@ certificates, you'll need a domain name that resolves to your Caddy server.
```yaml
services:
coder:
- image: ghcr.io/coder/coder:${CODER_VERSION:-latest}
- environment:
- CODER_PG_CONNECTION_URL: "postgresql://${POSTGRES_USER:-username}:${POSTGRES_PASSWORD:-password}@database/${POSTGRES_DB:-coder}?sslmode=disable"
- CODER_HTTP_ADDRESS: "0.0.0.0:7080"
- # You'll need to set CODER_ACCESS_URL to an IP or domain
- # that workspaces can reach. This cannot be localhost
- # or 127.0.0.1 for non-Docker templates!
- CODER_ACCESS_URL: "${CODER_ACCESS_URL}"
- # Optional) Enable wildcard apps/dashboard port forwarding
- CODER_WILDCARD_ACCESS_URL: "${CODER_WILDCARD_ACCESS_URL}"
- # If the coder user does not have write permissions on
- # the docker socket, you can uncomment the following
- # lines and set the group ID to one that has write
- # permissions on the docker socket.
- #group_add:
- # - "998" # docker group on host
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
- depends_on:
- database:
- condition: service_healthy
+ image: ghcr.io/coder/coder:${CODER_VERSION:-latest}
+ environment:
+ CODER_PG_CONNECTION_URL: "postgresql://${POSTGRES_USER:-username}:${POSTGRES_PASSWORD:-password}@database/${POSTGRES_DB:-coder}?sslmode=disable"
+ CODER_HTTP_ADDRESS: "0.0.0.0:7080"
+ # You'll need to set CODER_ACCESS_URL to an IP or domain
+ # that workspaces can reach. This cannot be localhost
+ # or 127.0.0.1 for non-Docker templates!
+ CODER_ACCESS_URL: "${CODER_ACCESS_URL}"
+ # Optional) Enable wildcard apps/dashboard port forwarding
+ CODER_WILDCARD_ACCESS_URL: "${CODER_WILDCARD_ACCESS_URL}"
+ # If the coder user does not have write permissions on
+ # the docker socket, you can uncomment the following
+ # lines and set the group ID to one that has write
+ # permissions on the docker socket.
+ #group_add:
+ # - "998" # docker group on host
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+ depends_on:
+ database:
+ condition: service_healthy
database:
- image: "postgres:16"
- ports:
- - "5432:5432"
- environment:
- POSTGRES_USER: ${POSTGRES_USER:-username} # The PostgreSQL user (useful to connect to the database)
- POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} # The PostgreSQL password (useful to connect to the database)
- POSTGRES_DB: ${POSTGRES_DB:-coder} # The PostgreSQL default database (automatically created at first launch)
- volumes:
- - coder_data:/var/lib/postgresql/data # Use "docker volume rm coder_coder_data" to reset Coder
- healthcheck:
- test:
- [
- "CMD-SHELL",
- "pg_isready -U ${POSTGRES_USER:-username} -d ${POSTGRES_DB:-coder}",
- ]
- interval: 5s
- timeout: 5s
- retries: 5
+ image: "postgres:16"
+ ports:
+ - "5432:5432"
+ environment:
+ POSTGRES_USER: ${POSTGRES_USER:-username} # The PostgreSQL user (useful to connect to the database)
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} # The PostgreSQL password (useful to connect to the database)
+ POSTGRES_DB: ${POSTGRES_DB:-coder} # The PostgreSQL default database (automatically created at first launch)
+ volumes:
+ - coder_data:/var/lib/postgresql/data # Use "docker volume rm coder_coder_data" to reset Coder
+ healthcheck:
+ test:
+ [
+ "CMD-SHELL",
+ "pg_isready -U ${POSTGRES_USER:-username} -d ${POSTGRES_DB:-coder}",
+ ]
+ interval: 5s
+ timeout: 5s
+ retries: 5
caddy:
- image: caddy:2.6.2
- ports:
- - "80:80"
- - "443:443"
- - "443:443/udp"
- volumes:
- - $PWD/Caddyfile:/etc/caddy/Caddyfile
- - caddy_data:/data
- - caddy_config:/config
+ image: caddy:2.6.2
+ ports:
+ - "80:80"
+ - "443:443"
+ - "443:443/udp"
+ volumes:
+ - $PWD/Caddyfile:/etc/caddy/Caddyfile
+ - caddy_data:/data
+ - caddy_config:/config
volumes:
- coder_data:
- caddy_data:
- caddy_config:
+ coder_data:
+ caddy_data:
+ caddy_config:
```
3. Create a `Caddyfile` and add the following:
```caddyfile
{
- on_demand_tls {
- ask http://example.com
- }
+ on_demand_tls {
+ ask http://example.com
+ }
}
coder.example.com, *.coder.example.com {
reverse_proxy coder:7080
tls {
- on_demand
+ on_demand
issuer acme {
email email@example.com
}
- }
+ }
}
```
@@ -124,9 +124,9 @@ certificates, you'll need a domain name that resolves to your Caddy server.
```caddyfile
{
- on_demand_tls {
- ask http://example.com
- }
+ on_demand_tls {
+ ask http://example.com
+ }
}
coder.example.com, *.coder.example.com {
diff --git a/docs/tutorials/support-bundle.md b/docs/tutorials/support-bundle.md
index f26c264b087d2..688e87908b338 100644
--- a/docs/tutorials/support-bundle.md
+++ b/docs/tutorials/support-bundle.md
@@ -27,7 +27,7 @@ A brief overview of all files contained in the bundle is provided below:
> out of scope, as support bundles are primarily intended for internal use.
| Filename | Description |
-| --------------------------------- | ---------------------------------------------------------------------------------------------------------- |
+|-----------------------------------|------------------------------------------------------------------------------------------------------------|
| `agent/agent.json` | The agent used to connect to the workspace with environment variables stripped. |
| `agent/agent_magicsock.html` | The contents of the HTTP debug endpoint of the agent's Tailscale Wireguard connection. |
| `agent/client_magicsock.html` | The contents of the HTTP debug endpoint of the client's Tailscale Wireguard connection. |
diff --git a/docs/tutorials/template-from-scratch.md b/docs/tutorials/template-from-scratch.md
index 3198b622724bf..b240f4ae2e292 100644
--- a/docs/tutorials/template-from-scratch.md
+++ b/docs/tutorials/template-from-scratch.md
@@ -46,15 +46,15 @@ and save the following text in `Dockerfile` then exit the editor:
FROM ubuntu
RUN apt-get update \
- && apt-get install -y \
- sudo \
- curl \
- && rm -rf /var/lib/apt/lists/*
+ && apt-get install -y \
+ sudo \
+ curl \
+ && rm -rf /var/lib/apt/lists/*
ARG USER=coder
RUN useradd --groups sudo --no-create-home --shell /bin/bash ${USER} \
- && echo "${USER} ALL=(ALL) NOPASSWD:ALL" >/etc/sudoers.d/${USER} \
- && chmod 0440 /etc/sudoers.d/${USER}
+ && echo "${USER} ALL=(ALL) NOPASSWD:ALL" >/etc/sudoers.d/${USER} \
+ && chmod 0440 /etc/sudoers.d/${USER}
USER ${USER}
WORKDIR /home/${USER}
```
@@ -99,6 +99,9 @@ provider "coder" {
data "coder_workspace" "me" {
}
+
+data "coder_workspace_owner" "me" {
+}
```
Notice that the `provider` blocks for `coder` and `docker` are empty. In a more
@@ -340,7 +343,7 @@ use the Coder CLI.
Attempting to authenticate with config URL: 'https://coder.example.com'
Open the following in your browser:
- https://coder.example.com/cli-auth
+ https://coder.example.com/cli-auth
> Paste your token here:
```
diff --git a/docs/tutorials/testing-templates.md b/docs/tutorials/testing-templates.md
index c98852c7ae1f5..c3572286049e0 100644
--- a/docs/tutorials/testing-templates.md
+++ b/docs/tutorials/testing-templates.md
@@ -3,7 +3,8 @@
+ Please try reloading the page. If reloading does not work, you can
+ ask for help in the{" "}
+
+ Coder Discord community
+ (link opens in a new tab)
+ {" "}
+ or{" "}
+
+ open an issue on GitHub
+ (link opens in a new tab)
+
+ .
+
+ );
+};
+
+type ErrorStackProps = Readonly<{ error: Error | ErrorResponse }>;
+const ErrorStack: FC = ({ error }) => {
+ return (
+
+ );
+};
+
+function serializeDataAsJson(data: unknown): string | null {
+ try {
+ return JSON.stringify(data, null, 2);
+ } catch {
+ return null;
+ }
+}
+
+function publicGithubIssueLink(
+ coderVersion: string | undefined,
+ pathName: string,
+ error: unknown,
+): string {
+ const baseLink = "https://github.com/coder/coder/issues/new";
+
+ // Anytime you see \`\`\`txt, that's wrapping the text in a GitHub codeblock
+ let printableError: string;
+ if (error instanceof Error) {
+ printableError = [
+ `${error.name}: ${error.message}`,
+ error.stack ? `\`\`\`txt\n${error.stack}\n\`\`\`` : "No stack",
+ ].join("\n");
+ } else if (isRouteErrorResponse(error)) {
+ const serialized = serializeDataAsJson(error.data);
+ printableError = [
+ `HTTP ${error.status} - ${error.statusText}`,
+ serialized ? `\`\`\`txt\n${serialized}\n\`\`\`` : "(No data)",
+ ].join("\n");
+ } else {
+ printableError = "No error message available";
+ }
+
+ const messageBody = `\
+**Version**
+${coderVersion ?? "-- Set version --"}
+
+**Path**
+\`${pathName}\`
+
+**Error**
+${printableError}`;
+
+ return `${baseLink}?body=${encodeURIComponent(messageBody)}`;
+}
diff --git a/site/src/components/ErrorBoundary/RuntimeErrorState.stories.tsx b/site/src/components/ErrorBoundary/RuntimeErrorState.stories.tsx
deleted file mode 100644
index dd0480e054ce4..0000000000000
--- a/site/src/components/ErrorBoundary/RuntimeErrorState.stories.tsx
+++ /dev/null
@@ -1,24 +0,0 @@
-import type { Meta, StoryObj } from "@storybook/react";
-import { RuntimeErrorState } from "./RuntimeErrorState";
-
-const error = new Error("An error occurred");
-
-const meta: Meta = {
- title: "components/RuntimeErrorState",
- component: RuntimeErrorState,
- args: {
- error,
- },
- parameters: {
- // The RuntimeErrorState is noisy for chromatic, because it renders an actual error
- // along with the stacktrace - and the stacktrace includes the full URL of
- // scripts in the stack. This is problematic, because every deployment uses
- // a different URL, causing the validation to fail.
- chromatic: { disableSnapshot: true },
- },
-};
-
-export default meta;
-type Story = StoryObj;
-
-export const Errored: Story = {};
diff --git a/site/src/components/ErrorBoundary/RuntimeErrorState.tsx b/site/src/components/ErrorBoundary/RuntimeErrorState.tsx
deleted file mode 100644
index 8e169647cf3cc..0000000000000
--- a/site/src/components/ErrorBoundary/RuntimeErrorState.tsx
+++ /dev/null
@@ -1,198 +0,0 @@
-import { type Interpolation, type Theme, css } from "@emotion/react";
-import RefreshOutlined from "@mui/icons-material/RefreshOutlined";
-import Button from "@mui/material/Button";
-import Link from "@mui/material/Link";
-import type { BuildInfoResponse } from "api/typesGenerated";
-import { CopyButton } from "components/CopyButton/CopyButton";
-import { CoderIcon } from "components/Icons/CoderIcon";
-import { Loader } from "components/Loader/Loader";
-import { Margins } from "components/Margins/Margins";
-import { Stack } from "components/Stack/Stack";
-import { type FC, useEffect, useState } from "react";
-import { Helmet } from "react-helmet-async";
-import { getStaticBuildInfo } from "utils/buildInfo";
-
-const fetchDynamicallyImportedModuleError =
- "Failed to fetch dynamically imported module";
-
-export type RuntimeErrorStateProps = { error: Error };
-
-export const RuntimeErrorState: FC = ({ error }) => {
- const [checkingError, setCheckingError] = useState(true);
- const [staticBuildInfo, setStaticBuildInfo] = useState();
- const coderVersion = staticBuildInfo?.version;
-
- // We use an effect to show a loading state if the page is trying to reload
- useEffect(() => {
- const isImportError = error.message.includes(
- fetchDynamicallyImportedModuleError,
- );
- const isRetried = window.location.search.includes("retries=1");
-
- if (isImportError && !isRetried) {
- const url = new URL(location.href);
- // Add a retry to avoid loops
- url.searchParams.set("retries", "1");
- location.assign(url.search);
- return;
- }
-
- setCheckingError(false);
- }, [error.message]);
-
- useEffect(() => {
- if (!checkingError) {
- setStaticBuildInfo(getStaticBuildInfo());
- }
- }, [checkingError]);
-
- return (
- <>
-
- Something went wrong...
-
- {checkingError ? (
-
- ) : (
-
-
-
-
Something went wrong...
-
- Please try reloading the page, if that doesn‘t work, you can
- ask for help in the{" "}
-
- Coder Discord community
- {" "}
- or{" "}
-
- open an issue
-
- .
-