From ccfffc69111d75d8591ce7f30626a68500008d1c Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Fri, 25 Oct 2024 12:14:15 -0400 Subject: [PATCH 01/42] chore: add tx metrics and logs for serialization errors (#15215) Before db_metrics were all or nothing. Now `InTx` metrics are always recorded, and query metrics are opt in. Adds instrumentation & logging around serialization failures in the database. --- cli/server.go | 4 +- cli/testdata/coder_server_--help.golden | 4 +- cli/testdata/server-config.yaml.golden | 3 +- coderd/autobuild/lifecycle_executor.go | 5 +- coderd/coderdtest/promhelp/doc.go | 3 + coderd/coderdtest/promhelp/metrics.go | 87 + coderd/cryptokeys/rotate.go | 5 +- coderd/database/db.go | 69 +- coderd/database/db_test.go | 2 +- coderd/database/dbauthz/dbauthz.go | 2 +- coderd/database/dbmem/dbmem.go | 5 +- coderd/database/dbmetrics/dbmetrics.go | 2766 +---------------- coderd/database/dbmetrics/dbmetrics_test.go | 109 + coderd/database/dbmetrics/querymetrics.go | 2710 ++++++++++++++++ coderd/database/dbmock/dbmock.go | 3 +- coderd/database/dbpurge/dbpurge.go | 2 +- coderd/database/dbrollup/dbrollup.go | 2 +- coderd/database/dbrollup/dbrollup_test.go | 2 +- coderd/database/tx.go | 2 +- coderd/database/tx_test.go | 10 +- coderd/idpsync/role_test.go | 3 +- coderd/promoauth/oauth2_test.go | 76 +- coderd/templates.go | 2 +- coderd/wsbuilder/wsbuilder_test.go | 6 +- codersdk/deployment.go | 20 +- docs/reference/cli/server.md | 2 +- .../cli/testdata/coder_server_--help.golden | 4 +- enterprise/coderd/workspacequota.go | 5 +- enterprise/dbcrypt/cliutil.go | 4 +- enterprise/dbcrypt/dbcrypt.go | 4 +- enterprise/dbcrypt/dbcrypt_internal_test.go | 2 +- scripts/dbgen/main.go | 2 +- 32 files changed, 3124 insertions(+), 2801 deletions(-) create mode 100644 coderd/coderdtest/promhelp/doc.go create mode 100644 coderd/coderdtest/promhelp/metrics.go create mode 100644 coderd/database/dbmetrics/dbmetrics_test.go create mode 100644 coderd/database/dbmetrics/querymetrics.go diff --git a/cli/server.go b/cli/server.go index 5adb44c3c0a7d..2cf2d95a065e9 100644 --- a/cli/server.go +++ b/cli/server.go @@ -718,7 +718,9 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. } if options.DeploymentValues.Prometheus.Enable && options.DeploymentValues.Prometheus.CollectDBMetrics { - options.Database = dbmetrics.New(options.Database, options.PrometheusRegistry) + options.Database = dbmetrics.NewQueryMetrics(options.Database, options.Logger, options.PrometheusRegistry) + } else { + options.Database = dbmetrics.NewDBMetrics(options.Database, options.Logger, options.PrometheusRegistry) } var deploymentID string diff --git a/cli/testdata/coder_server_--help.golden b/cli/testdata/coder_server_--help.golden index 1387e31710e88..d5c26d98115cb 100644 --- a/cli/testdata/coder_server_--help.golden +++ b/cli/testdata/coder_server_--help.golden @@ -145,7 +145,9 @@ INTROSPECTION / PROMETHEUS OPTIONS: Collect agent stats (may increase charges for metrics storage). --prometheus-collect-db-metrics bool, $CODER_PROMETHEUS_COLLECT_DB_METRICS (default: false) - Collect database metrics (may increase charges for metrics storage). + Collect database query metrics (may increase charges for metrics + storage). If set to false, a reduced set of database metrics are still + collected. --prometheus-enable bool, $CODER_PROMETHEUS_ENABLE Serve prometheus metrics on the address defined by prometheus address. 
diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index 78c893c58ae16..95486a26344b8 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -197,7 +197,8 @@ introspection: - template_name - username - workspace_name - # Collect database metrics (may increase charges for metrics storage). + # Collect database query metrics (may increase charges for metrics storage). If + # set to false, a reduced set of database metrics are still collected. # (default: false, type: bool) collect_db_metrics: false pprof: diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index 400f0406aee0e..db3c1cfd3dd31 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -285,7 +285,10 @@ func (e *Executor) runOnce(t time.Time) Stats { // Run with RepeatableRead isolation so that the build process sees the same data // as our calculation that determines whether an autobuild is necessary. - }, &sql.TxOptions{Isolation: sql.LevelRepeatableRead}) + }, &database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + TxIdentifier: "lifecycle", + }) if auditLog != nil { // If the transition didn't succeed then updating the workspace // to indicate dormant didn't either. diff --git a/coderd/coderdtest/promhelp/doc.go b/coderd/coderdtest/promhelp/doc.go new file mode 100644 index 0000000000000..48b7e4b5aa550 --- /dev/null +++ b/coderd/coderdtest/promhelp/doc.go @@ -0,0 +1,3 @@ +// Package promhelp provides helper functions for asserting Prometheus +// metric values in unit tests. +package promhelp diff --git a/coderd/coderdtest/promhelp/metrics.go b/coderd/coderdtest/promhelp/metrics.go new file mode 100644 index 0000000000000..39c8af6ef9561 --- /dev/null +++ b/coderd/coderdtest/promhelp/metrics.go @@ -0,0 +1,87 @@ +package promhelp + +import ( + "context" + "io" + "maps" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + ptestutil "github.com/prometheus/client_golang/prometheus/testutil" + io_prometheus_client "github.com/prometheus/client_model/go" + "github.com/stretchr/testify/require" +) + +// RegistryDump returns the http page for a given registry's metrics. +// Very useful for visual debugging. +func RegistryDump(reg *prometheus.Registry) string { + h := promhttp.HandlerFor(reg, promhttp.HandlerOpts{}) + rec := httptest.NewRecorder() + req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "/", nil) + h.ServeHTTP(rec, req) + resp := rec.Result() + data, _ := io.ReadAll(resp.Body) + _ = resp.Body.Close() + return string(data) +} + +// Compare can be used to compare a registry to some prometheus formatted +// text. If any values differ, an error is returned. +// If metric names are passed in, only those metrics will be compared. +// Usage: `Compare(reg, RegistryDump(reg))` +func Compare(reg prometheus.Gatherer, compare string, metricNames ...string) error { + return ptestutil.GatherAndCompare(reg, strings.NewReader(compare), metricNames...) +} + +// HistogramValue returns the value of a histogram metric with the given name and labels. 
+func HistogramValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Histogram { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return labeled.GetHistogram() +} + +// GaugeValue returns the value of a gauge metric with the given name and labels. +func GaugeValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return int(labeled.GetGauge().GetValue()) +} + +// CounterValue returns the value of a counter metric with the given name and labels. +func CounterValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { + t.Helper() + + labeled := MetricValue(t, reg, metricName, labels) + require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) + return int(labeled.GetCounter().GetValue()) +} + +func MetricValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Metric { + t.Helper() + + metrics, err := reg.Gather() + require.NoError(t, err) + + for _, m := range metrics { + if m.GetName() == metricName { + for _, labeled := range m.GetMetric() { + mLabels := make(prometheus.Labels) + for _, v := range labeled.GetLabel() { + mLabels[v.GetName()] = v.GetValue() + } + if maps.Equal(mLabels, labels) { + return labeled + } + } + } + } + return nil +} diff --git a/coderd/cryptokeys/rotate.go b/coderd/cryptokeys/rotate.go index 14a623e2156db..83e4106584b03 100644 --- a/coderd/cryptokeys/rotate.go +++ b/coderd/cryptokeys/rotate.go @@ -161,8 +161,9 @@ func (k *rotator) rotateKeys(ctx context.Context) error { } } return nil - }, &sql.TxOptions{ - Isolation: sql.LevelRepeatableRead, + }, &database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + TxIdentifier: "rotate_keys", }) } diff --git a/coderd/database/db.go b/coderd/database/db.go index 51e61e4ce2027..ae2c31a566cb3 100644 --- a/coderd/database/db.go +++ b/coderd/database/db.go @@ -28,7 +28,7 @@ type Store interface { wrapper Ping(ctx context.Context) (time.Duration, error) - InTx(func(Store) error, *sql.TxOptions) error + InTx(func(Store) error, *TxOptions) error } type wrapper interface { @@ -57,6 +57,43 @@ func New(sdb *sql.DB) Store { } } +// TxOptions is used to pass some execution metadata to the callers. +// Ideally we could throw this into a context, but no context is used for +// transactions. So instead, the return context is attached to the options +// passed in. +// This metadata should not be returned in the method signature, because it +// is only used for metric tracking. It should never be used by business logic. +type TxOptions struct { + // Isolation is the transaction isolation level. + // If zero, the driver or database's default level is used. + Isolation sql.IsolationLevel + ReadOnly bool + + // -- Coder specific metadata -- + // TxIdentifier is a unique identifier for the transaction to be used + // in metrics. Can be any string. + TxIdentifier string + + // Set by InTx + executionCount int +} + +// IncrementExecutionCount is a helper function for external packages +// to increment the unexported count. +// Mainly for `dbmem`. 
+func IncrementExecutionCount(opts *TxOptions) { + opts.executionCount++ +} + +func (o TxOptions) ExecutionCount() int { + return o.executionCount +} + +func (o *TxOptions) WithID(id string) *TxOptions { + o.TxIdentifier = id + return o +} + // queries encompasses both are sqlc generated // queries and our custom queries. type querier interface { @@ -80,11 +117,24 @@ func (q *sqlQuerier) Ping(ctx context.Context) (time.Duration, error) { return time.Since(start), err } -func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) error { +func DefaultTXOptions() *TxOptions { + return &TxOptions{ + Isolation: sql.LevelDefault, + ReadOnly: false, + } +} + +func (q *sqlQuerier) InTx(function func(Store) error, txOpts *TxOptions) error { _, inTx := q.db.(*sqlx.Tx) - isolation := sql.LevelDefault - if txOpts != nil { - isolation = txOpts.Isolation + + if txOpts == nil { + // create a default txOpts if left to nil + txOpts = DefaultTXOptions() + } + + sqlOpts := &sql.TxOptions{ + Isolation: txOpts.Isolation, + ReadOnly: txOpts.ReadOnly, } // If we are not already in a transaction, and we are running in serializable @@ -92,13 +142,14 @@ func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) err // prepared to allow retries if using serializable mode. // If we are in a transaction already, the parent InTx call will handle the retry. // We do not want to duplicate those retries. - if !inTx && isolation == sql.LevelSerializable { + if !inTx && sqlOpts.Isolation == sql.LevelSerializable { // This is an arbitrarily chosen number. const retryAmount = 3 var err error attempts := 0 for attempts = 0; attempts < retryAmount; attempts++ { - err = q.runTx(function, txOpts) + txOpts.executionCount++ + err = q.runTx(function, sqlOpts) if err == nil { // Transaction succeeded. return nil @@ -111,7 +162,9 @@ func (q *sqlQuerier) InTx(function func(Store) error, txOpts *sql.TxOptions) err // Transaction kept failing in serializable mode. return xerrors.Errorf("transaction failed after %d attempts: %w", attempts, err) } - return q.runTx(function, txOpts) + + txOpts.executionCount++ + return q.runTx(function, sqlOpts) } // InTx performs database operations inside a transaction. diff --git a/coderd/database/db_test.go b/coderd/database/db_test.go index db7fe41eea3dc..a6df18fcbb8c8 100644 --- a/coderd/database/db_test.go +++ b/coderd/database/db_test.go @@ -27,7 +27,7 @@ func TestSerializedRetry(t *testing.T) { db := database.New(sqlDB) called := 0 - txOpts := &sql.TxOptions{Isolation: sql.LevelSerializable} + txOpts := &database.TxOptions{Isolation: sql.LevelSerializable} err := db.InTx(func(tx database.Store) error { // Test nested error return tx.InTx(func(tx database.Store) error { diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 052f25450e6a5..46ccdd15933e8 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -558,7 +558,7 @@ func (q *querier) Ping(ctx context.Context) (time.Duration, error) { } // InTx runs the given function in a transaction. -func (q *querier) InTx(function func(querier database.Store) error, txOpts *sql.TxOptions) error { +func (q *querier) InTx(function func(querier database.Store) error, txOpts *database.TxOptions) error { return q.db.InTx(func(tx database.Store) error { // Wrap the transaction store in a querier. 
wrapped := New(tx, q.auth, q.log, q.acs) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 24498d88c9dbc..4f54598744dd0 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -365,7 +365,7 @@ func (tx *fakeTx) releaseLocks() { } // InTx doesn't rollback data properly for in-memory yet. -func (q *FakeQuerier) InTx(fn func(database.Store) error, _ *sql.TxOptions) error { +func (q *FakeQuerier) InTx(fn func(database.Store) error, opts *database.TxOptions) error { q.mutex.Lock() defer q.mutex.Unlock() tx := &fakeTx{ @@ -374,6 +374,9 @@ func (q *FakeQuerier) InTx(fn func(database.Store) error, _ *sql.TxOptions) erro } defer tx.releaseLocks() + if opts != nil { + database.IncrementExecutionCount(opts) + } return fn(tx) } diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index c3e9de22fb0d8..404a685876bc0 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -1,2721 +1,117 @@ -// Code generated by coderd/database/gen/metrics. -// Any function can be edited and will not be overwritten. -// New database functions are automatically generated! package dbmetrics import ( "context" - "database/sql" + "strconv" "time" - "github.com/google/uuid" "github.com/prometheus/client_golang/prometheus" "golang.org/x/exp/slices" + "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/coderd/rbac/policy" ) -var ( - // Force these imports, for some reason the autogen does not include them. - _ uuid.UUID - _ policy.Action - _ rbac.Objecter -) - -const wrapname = "dbmetrics.metricsStore" - -// New returns a database.Store that registers metrics for all queries to reg. -func New(s database.Store, reg prometheus.Registerer) database.Store { +type metricsStore struct { + database.Store + logger slog.Logger + // txDuration is how long transactions take to execute. + txDuration *prometheus.HistogramVec + // txRetries is how many retries we are seeing for a given tx. + txRetries *prometheus.CounterVec +} + +// NewDBMetrics returns a database.Store that registers metrics for the database +// but does not handle individual queries. +// metricsStore is intended to always be used, because queryMetrics are a bit +// too verbose for many use cases. +func NewDBMetrics(s database.Store, logger slog.Logger, reg prometheus.Registerer) database.Store { // Don't double-wrap. if slices.Contains(s.Wrappers(), wrapname) { return s } - queryLatencies := prometheus.NewHistogramVec(prometheus.HistogramOpts{ + txRetries := prometheus.NewCounterVec(prometheus.CounterOpts{ Namespace: "coderd", Subsystem: "db", - Name: "query_latencies_seconds", - Help: "Latency distribution of queries in seconds.", - Buckets: prometheus.DefBuckets, - }, []string{"query"}) - txDuration := prometheus.NewHistogram(prometheus.HistogramOpts{ + Name: "tx_executions_count", + Help: "Total count of transactions executed. 'retries' is expected to be 0 for a successful transaction.", + }, []string{ + "success", // Did the InTx function return an error? + // Number of executions, since we have retry logic on serialization errors. + // retries = Executions - 1 (as 1 execute is expected) + "retries", + // Uniquely naming some transactions can help debug reoccurring errors. 
+ "id", + }) + reg.MustRegister(txRetries) + + txDuration := prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "coderd", Subsystem: "db", Name: "tx_duration_seconds", Help: "Duration of transactions in seconds.", Buckets: prometheus.DefBuckets, + }, []string{ + "success", // Did the InTx function return an error? + // Uniquely naming some transactions can help debug reoccurring errors. + "id", }) - reg.MustRegister(queryLatencies) reg.MustRegister(txDuration) return &metricsStore{ - s: s, - queryLatencies: queryLatencies, - txDuration: txDuration, + Store: s, + txDuration: txDuration, + txRetries: txRetries, + logger: logger, } } -var _ database.Store = (*metricsStore)(nil) - -type metricsStore struct { - s database.Store - queryLatencies *prometheus.HistogramVec - txDuration prometheus.Histogram -} - func (m metricsStore) Wrappers() []string { - return append(m.s.Wrappers(), wrapname) -} - -func (m metricsStore) Ping(ctx context.Context) (time.Duration, error) { - start := time.Now() - duration, err := m.s.Ping(ctx) - m.queryLatencies.WithLabelValues("Ping").Observe(time.Since(start).Seconds()) - return duration, err -} - -func (m metricsStore) InTx(f func(database.Store) error, options *sql.TxOptions) error { - start := time.Now() - err := m.s.InTx(f, options) - m.txDuration.Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error { - start := time.Now() - err := m.s.AcquireLock(ctx, pgAdvisoryXactLock) - m.queryLatencies.WithLabelValues("AcquireLock").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) AcquireNotificationMessages(ctx context.Context, arg database.AcquireNotificationMessagesParams) ([]database.AcquireNotificationMessagesRow, error) { - start := time.Now() - r0, r1 := m.s.AcquireNotificationMessages(ctx, arg) - m.queryLatencies.WithLabelValues("AcquireNotificationMessages").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { - start := time.Now() - provisionerJob, err := m.s.AcquireProvisionerJob(ctx, arg) - m.queryLatencies.WithLabelValues("AcquireProvisionerJob").Observe(time.Since(start).Seconds()) - return provisionerJob, err -} - -func (m metricsStore) ActivityBumpWorkspace(ctx context.Context, arg database.ActivityBumpWorkspaceParams) error { - start := time.Now() - r0 := m.s.ActivityBumpWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("ActivityBumpWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) AllUserIDs(ctx context.Context) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.AllUserIDs(ctx) - m.queryLatencies.WithLabelValues("AllUserIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ArchiveUnusedTemplateVersions(ctx context.Context, arg database.ArchiveUnusedTemplateVersionsParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.ArchiveUnusedTemplateVersions(ctx, arg) - m.queryLatencies.WithLabelValues("ArchiveUnusedTemplateVersions").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { - start := time.Now() - r0 := m.s.BatchUpdateWorkspaceLastUsedAt(ctx, arg) - 
m.queryLatencies.WithLabelValues("BatchUpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) BulkMarkNotificationMessagesFailed(ctx context.Context, arg database.BulkMarkNotificationMessagesFailedParams) (int64, error) { - start := time.Now() - r0, r1 := m.s.BulkMarkNotificationMessagesFailed(ctx, arg) - m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesFailed").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) BulkMarkNotificationMessagesSent(ctx context.Context, arg database.BulkMarkNotificationMessagesSentParams) (int64, error) { - start := time.Now() - r0, r1 := m.s.BulkMarkNotificationMessagesSent(ctx, arg) - m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesSent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) CleanTailnetCoordinators(ctx context.Context) error { - start := time.Now() - err := m.s.CleanTailnetCoordinators(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetCoordinators").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) CleanTailnetLostPeers(ctx context.Context) error { - start := time.Now() - r0 := m.s.CleanTailnetLostPeers(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetLostPeers").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) CleanTailnetTunnels(ctx context.Context) error { - start := time.Now() - r0 := m.s.CleanTailnetTunnels(ctx) - m.queryLatencies.WithLabelValues("CleanTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.CustomRoles(ctx, arg) - m.queryLatencies.WithLabelValues("CustomRoles").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteAPIKeyByID(ctx context.Context, id string) error { - start := time.Now() - err := m.s.DeleteAPIKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteAPIKeyByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := m.s.DeleteAPIKeysByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteAllTailnetClientSubscriptions(ctx context.Context, arg database.DeleteAllTailnetClientSubscriptionsParams) error { - start := time.Now() - r0 := m.s.DeleteAllTailnetClientSubscriptions(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteAllTailnetClientSubscriptions").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteAllTailnetTunnels(ctx context.Context, arg database.DeleteAllTailnetTunnelsParams) error { - start := time.Now() - r0 := m.s.DeleteAllTailnetTunnels(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteAllTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := m.s.DeleteApplicationConnectAPIKeysByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteApplicationConnectAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteCoordinator(ctx, id) - 
m.queryLatencies.WithLabelValues("DeleteCoordinator").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteCryptoKey(ctx context.Context, arg database.DeleteCryptoKeyParams) (database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.DeleteCryptoKey(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteCryptoKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteCustomRole(ctx context.Context, arg database.DeleteCustomRoleParams) error { - start := time.Now() - r0 := m.s.DeleteCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteCustomRole").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { - start := time.Now() - r0 := m.s.DeleteExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteExternalAuthLink").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - err := m.s.DeleteGitSSHKey(ctx, userID) - m.queryLatencies.WithLabelValues("DeleteGitSSHKey").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteGroupByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - err := m.s.DeleteGroupByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteGroupByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteGroupMemberFromGroup(ctx context.Context, arg database.DeleteGroupMemberFromGroupParams) error { - start := time.Now() - err := m.s.DeleteGroupMemberFromGroup(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteGroupMemberFromGroup").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteLicense(ctx context.Context, id int32) (int32, error) { - start := time.Now() - licenseID, err := m.s.DeleteLicense(ctx, id) - m.queryLatencies.WithLabelValues("DeleteLicense").Observe(time.Since(start).Seconds()) - return licenseID, err -} - -func (m metricsStore) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppCodeByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodesByAppAndUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOAuth2ProviderAppSecretByID(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { - start := time.Now() - r0 := 
m.s.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppTokensByAppAndUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldNotificationMessages(ctx context.Context) error { - start := time.Now() - r0 := m.s.DeleteOldNotificationMessages(ctx) - m.queryLatencies.WithLabelValues("DeleteOldNotificationMessages").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldProvisionerDaemons(ctx context.Context) error { - start := time.Now() - r0 := m.s.DeleteOldProvisionerDaemons(ctx) - m.queryLatencies.WithLabelValues("DeleteOldProvisionerDaemons").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, arg time.Time) error { - start := time.Now() - r0 := m.s.DeleteOldWorkspaceAgentLogs(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) - return r0 + return append(m.Store.Wrappers(), wrapname) } -func (m metricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) error { - start := time.Now() - err := m.s.DeleteOldWorkspaceAgentStats(ctx) - m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteOrganization(ctx, id) - m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error { - start := time.Now() - r0 := m.s.DeleteOrganizationMember(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteOrganizationMember").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteProvisionerKey(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteProvisionerKey(ctx, id) - m.queryLatencies.WithLabelValues("DeleteProvisionerKey").Observe(time.Since(start).Seconds()) - return r0 -} +func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOptions) error { + if options == nil { + options = database.DefaultTXOptions() + } -func (m metricsStore) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { start := time.Now() - err := m.s.DeleteReplicasUpdatedBefore(ctx, updatedAt) - m.queryLatencies.WithLabelValues("DeleteReplicasUpdatedBefore").Observe(time.Since(start).Seconds()) + err := m.Store.InTx(f, options) + dur := time.Since(start) + // The number of unique label combinations is + // 2 x #IDs x #of buckets + // So IDs should be used sparingly to prevent too much bloat. + m.txDuration.With(prometheus.Labels{ + "success": strconv.FormatBool(err == nil), + "id": options.TxIdentifier, // Can be empty string for unlabeled + }).Observe(dur.Seconds()) + + m.txRetries.With(prometheus.Labels{ + "success": strconv.FormatBool(err == nil), + "retries": strconv.FormatInt(int64(options.ExecutionCount()-1), 10), + "id": options.TxIdentifier, // Can be empty string for unlabeled + }).Inc() + + // Log all serializable transactions that are retried. + // This is expected to happen in production, but should be kept + // to a minimum. If these logs happen frequently, something is wrong. 
+ if options.ExecutionCount() > 1 { + l := m.logger.Warn + if err != nil { + // Error level if retries were not enough + l = m.logger.Error + } + // No context is present in this function :( + l(context.Background(), "database transaction hit serialization error and had to retry", + slog.F("success", err == nil), // It can succeed on retry + // Note the error might not be a serialization error. It is possible + // the first error was a serialization error, and the error on the + // retry is different. If this is the case, we still want to log it + // since the first error was a serialization error. + slog.Error(err), // Might be nil, that is ok! + slog.F("executions", options.ExecutionCount()), + slog.F("id", options.TxIdentifier), + slog.F("duration", dur), + ) + } return err } - -func (m metricsStore) DeleteRuntimeConfig(ctx context.Context, key string) error { - start := time.Now() - r0 := m.s.DeleteRuntimeConfig(ctx, key) - m.queryLatencies.WithLabelValues("DeleteRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteTailnetAgent(ctx context.Context, arg database.DeleteTailnetAgentParams) (database.DeleteTailnetAgentRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetAgent(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetClient(ctx context.Context, arg database.DeleteTailnetClientParams) (database.DeleteTailnetClientRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetClient(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetClient").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetClientSubscription(ctx context.Context, arg database.DeleteTailnetClientSubscriptionParams) error { - start := time.Now() - r0 := m.s.DeleteTailnetClientSubscription(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetClientSubscription").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteTailnetPeer(ctx context.Context, arg database.DeleteTailnetPeerParams) (database.DeleteTailnetPeerRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetPeer(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetPeer").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTailnetTunnelParams) (database.DeleteTailnetTunnelRow, error) { - start := time.Now() - r0, r1 := m.s.DeleteTailnetTunnel(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteTailnetTunnel").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) DeleteWorkspaceAgentPortShare(ctx context.Context, arg database.DeleteWorkspaceAgentPortShareParams) error { - start := time.Now() - r0 := m.s.DeleteWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, templateID uuid.UUID) error { - start := time.Now() - r0 := m.s.DeleteWorkspaceAgentPortSharesByTemplate(ctx, templateID) - m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortSharesByTemplate").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error { - start := time.Now() - r0 := 
m.s.EnqueueNotificationMessage(ctx, arg) - m.queryLatencies.WithLabelValues("EnqueueNotificationMessage").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) FavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { - start := time.Now() - r0 := m.s.FavoriteWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("FavoriteWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) { - start := time.Now() - r0, r1 := m.s.FetchNewMessageMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("FetchNewMessageMetadata").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { - start := time.Now() - apiKey, err := m.s.GetAPIKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetAPIKeyByID").Observe(time.Since(start).Seconds()) - return apiKey, err -} - -func (m metricsStore) GetAPIKeyByName(ctx context.Context, arg database.GetAPIKeyByNameParams) (database.APIKey, error) { - start := time.Now() - apiKey, err := m.s.GetAPIKeyByName(ctx, arg) - m.queryLatencies.WithLabelValues("GetAPIKeyByName").Observe(time.Since(start).Seconds()) - return apiKey, err -} - -func (m metricsStore) GetAPIKeysByLoginType(ctx context.Context, loginType database.LoginType) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysByLoginType(ctx, loginType) - m.queryLatencies.WithLabelValues("GetAPIKeysByLoginType").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetAPIKeysByUserID(ctx context.Context, arg database.GetAPIKeysByUserIDParams) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysByUserID(ctx, arg) - m.queryLatencies.WithLabelValues("GetAPIKeysByUserID").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]database.APIKey, error) { - start := time.Now() - apiKeys, err := m.s.GetAPIKeysLastUsedAfter(ctx, lastUsed) - m.queryLatencies.WithLabelValues("GetAPIKeysLastUsedAfter").Observe(time.Since(start).Seconds()) - return apiKeys, err -} - -func (m metricsStore) GetActiveUserCount(ctx context.Context) (int64, error) { - start := time.Now() - count, err := m.s.GetActiveUserCount(ctx) - m.queryLatencies.WithLabelValues("GetActiveUserCount").Observe(time.Since(start).Seconds()) - return count, err -} - -func (m metricsStore) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, templateID uuid.UUID) ([]database.WorkspaceBuild, error) { - start := time.Now() - r0, r1 := m.s.GetActiveWorkspaceBuildsByTemplateID(ctx, templateID) - m.queryLatencies.WithLabelValues("GetActiveWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetAgents(ctx context.Context) ([]database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetAgents(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetAgents").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetCoordinators(ctx context.Context) ([]database.TailnetCoordinator, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetCoordinators(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetCoordinators").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m 
metricsStore) GetAllTailnetPeers(ctx context.Context) ([]database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetPeers(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetPeers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetTunnel, error) { - start := time.Now() - r0, r1 := m.s.GetAllTailnetTunnels(ctx) - m.queryLatencies.WithLabelValues("GetAllTailnetTunnels").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAnnouncementBanners(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetAnnouncementBanners(ctx) - m.queryLatencies.WithLabelValues("GetAnnouncementBanners").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAppSecurityKey(ctx context.Context) (string, error) { - start := time.Now() - key, err := m.s.GetAppSecurityKey(ctx) - m.queryLatencies.WithLabelValues("GetAppSecurityKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) GetApplicationName(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetApplicationName(ctx) - m.queryLatencies.WithLabelValues("GetApplicationName").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams) ([]database.GetAuditLogsOffsetRow, error) { - start := time.Now() - rows, err := m.s.GetAuditLogsOffset(ctx, arg) - m.queryLatencies.WithLabelValues("GetAuditLogsOffset").Observe(time.Since(start).Seconds()) - return rows, err -} - -func (m metricsStore) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (database.GetAuthorizationUserRolesRow, error) { - start := time.Now() - row, err := m.s.GetAuthorizationUserRoles(ctx, userID) - m.queryLatencies.WithLabelValues("GetAuthorizationUserRoles").Observe(time.Since(start).Seconds()) - return row, err -} - -func (m metricsStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetCoordinatorResumeTokenSigningKey(ctx) - m.queryLatencies.WithLabelValues("GetCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeyByFeatureAndSequence(ctx context.Context, arg database.GetCryptoKeyByFeatureAndSequenceParams) (database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeyByFeatureAndSequence(ctx, arg) - m.queryLatencies.WithLabelValues("GetCryptoKeyByFeatureAndSequence").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeys(ctx context.Context) ([]database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeys(ctx) - m.queryLatencies.WithLabelValues("GetCryptoKeys").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetCryptoKeysByFeature(ctx context.Context, feature database.CryptoKeyFeature) ([]database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetCryptoKeysByFeature(ctx, feature) - m.queryLatencies.WithLabelValues("GetCryptoKeysByFeature").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetDBCryptKeys(ctx context.Context) ([]database.DBCryptKey, error) { - start := time.Now() - r0, r1 := m.s.GetDBCryptKeys(ctx) - m.queryLatencies.WithLabelValues("GetDBCryptKeys").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m 
metricsStore) GetDERPMeshKey(ctx context.Context) (string, error) { - start := time.Now() - key, err := m.s.GetDERPMeshKey(ctx) - m.queryLatencies.WithLabelValues("GetDERPMeshKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) GetDefaultOrganization(ctx context.Context) (database.Organization, error) { - start := time.Now() - r0, r1 := m.s.GetDefaultOrganization(ctx) - m.queryLatencies.WithLabelValues("GetDefaultOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetDefaultProxyConfig(ctx context.Context) (database.GetDefaultProxyConfigRow, error) { - start := time.Now() - resp, err := m.s.GetDefaultProxyConfig(ctx) - m.queryLatencies.WithLabelValues("GetDefaultProxyConfig").Observe(time.Since(start).Seconds()) - return resp, err -} - -func (m metricsStore) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]database.GetDeploymentDAUsRow, error) { - start := time.Now() - rows, err := m.s.GetDeploymentDAUs(ctx, tzOffset) - m.queryLatencies.WithLabelValues("GetDeploymentDAUs").Observe(time.Since(start).Seconds()) - return rows, err -} - -func (m metricsStore) GetDeploymentID(ctx context.Context) (string, error) { - start := time.Now() - id, err := m.s.GetDeploymentID(ctx) - m.queryLatencies.WithLabelValues("GetDeploymentID").Observe(time.Since(start).Seconds()) - return id, err -} - -func (m metricsStore) GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentStatsRow, error) { - start := time.Now() - row, err := m.s.GetDeploymentWorkspaceAgentStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return row, err -} - -func (m metricsStore) GetDeploymentWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentUsageStatsRow, error) { - start := time.Now() - r0, r1 := m.s.GetDeploymentWorkspaceAgentUsageStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetDeploymentWorkspaceStats(ctx context.Context) (database.GetDeploymentWorkspaceStatsRow, error) { - start := time.Now() - row, err := m.s.GetDeploymentWorkspaceStats(ctx) - m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceStats").Observe(time.Since(start).Seconds()) - return row, err -} - -func (m metricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) { - start := time.Now() - link, err := m.s.GetExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("GetExternalAuthLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) { - start := time.Now() - r0, r1 := m.s.GetExternalAuthLinksByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetExternalAuthLinksByUserID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg database.GetFailedWorkspaceBuildsByTemplateIDParams) ([]database.GetFailedWorkspaceBuildsByTemplateIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetFailedWorkspaceBuildsByTemplateID(ctx, arg) - m.queryLatencies.WithLabelValues("GetFailedWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) - return r0, r1 
-} - -func (m metricsStore) GetFileByHashAndCreator(ctx context.Context, arg database.GetFileByHashAndCreatorParams) (database.File, error) { - start := time.Now() - file, err := m.s.GetFileByHashAndCreator(ctx, arg) - m.queryLatencies.WithLabelValues("GetFileByHashAndCreator").Observe(time.Since(start).Seconds()) - return file, err -} - -func (m metricsStore) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, error) { - start := time.Now() - file, err := m.s.GetFileByID(ctx, id) - m.queryLatencies.WithLabelValues("GetFileByID").Observe(time.Since(start).Seconds()) - return file, err -} - -func (m metricsStore) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]database.GetFileTemplatesRow, error) { - start := time.Now() - rows, err := m.s.GetFileTemplates(ctx, fileID) - m.queryLatencies.WithLabelValues("GetFileTemplates").Observe(time.Since(start).Seconds()) - return rows, err -} - -func (m metricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) { - start := time.Now() - key, err := m.s.GetGitSSHKey(ctx, userID) - m.queryLatencies.WithLabelValues("GetGitSSHKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) GetGroupByID(ctx context.Context, id uuid.UUID) (database.Group, error) { - start := time.Now() - group, err := m.s.GetGroupByID(ctx, id) - m.queryLatencies.WithLabelValues("GetGroupByID").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) GetGroupByOrgAndName(ctx context.Context, arg database.GetGroupByOrgAndNameParams) (database.Group, error) { - start := time.Now() - group, err := m.s.GetGroupByOrgAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetGroupByOrgAndName").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) GetGroupMembers(ctx context.Context) ([]database.GroupMember, error) { - start := time.Now() - r0, r1 := m.s.GetGroupMembers(ctx) - m.queryLatencies.WithLabelValues("GetGroupMembers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetGroupMembersByGroupID(ctx context.Context, groupID uuid.UUID) ([]database.GroupMember, error) { - start := time.Now() - users, err := m.s.GetGroupMembersByGroupID(ctx, groupID) - m.queryLatencies.WithLabelValues("GetGroupMembersByGroupID").Observe(time.Since(start).Seconds()) - return users, err -} - -func (m metricsStore) GetGroupMembersCountByGroupID(ctx context.Context, groupID uuid.UUID) (int64, error) { - start := time.Now() - r0, r1 := m.s.GetGroupMembersCountByGroupID(ctx, groupID) - m.queryLatencies.WithLabelValues("GetGroupMembersCountByGroupID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetGroups(ctx context.Context, arg database.GetGroupsParams) ([]database.GetGroupsRow, error) { - start := time.Now() - r0, r1 := m.s.GetGroups(ctx, arg) - m.queryLatencies.WithLabelValues("GetGroups").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetHealthSettings(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetHealthSettings(ctx) - m.queryLatencies.WithLabelValues("GetHealthSettings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := m.s.GetHungProvisionerJobs(ctx, hungSince) - m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(start).Seconds()) - 
return jobs, err -} - -func (m metricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) { - start := time.Now() - r0, r1 := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) - m.queryLatencies.WithLabelValues("GetJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetLastUpdateCheck(ctx context.Context) (string, error) { - start := time.Now() - version, err := m.s.GetLastUpdateCheck(ctx) - m.queryLatencies.WithLabelValues("GetLastUpdateCheck").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetLatestCryptoKeyByFeature(ctx context.Context, feature database.CryptoKeyFeature) (database.CryptoKey, error) { - start := time.Now() - r0, r1 := m.s.GetLatestCryptoKeyByFeature(ctx, feature) - m.queryLatencies.WithLabelValues("GetLatestCryptoKeyByFeature").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID) - m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildByWorkspaceID").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetLatestWorkspaceBuilds(ctx) - m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuilds").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetLatestWorkspaceBuildsByWorkspaceIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildsByWorkspaceIDs").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetLicenseByID(ctx context.Context, id int32) (database.License, error) { - start := time.Now() - license, err := m.s.GetLicenseByID(ctx, id) - m.queryLatencies.WithLabelValues("GetLicenseByID").Observe(time.Since(start).Seconds()) - return license, err -} - -func (m metricsStore) GetLicenses(ctx context.Context) ([]database.License, error) { - start := time.Now() - licenses, err := m.s.GetLicenses(ctx) - m.queryLatencies.WithLabelValues("GetLicenses").Observe(time.Since(start).Seconds()) - return licenses, err -} - -func (m metricsStore) GetLogoURL(ctx context.Context) (string, error) { - start := time.Now() - url, err := m.s.GetLogoURL(ctx) - m.queryLatencies.WithLabelValues("GetLogoURL").Observe(time.Since(start).Seconds()) - return url, err -} - -func (m metricsStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationMessagesByStatus(ctx, arg) - m.queryLatencies.WithLabelValues("GetNotificationMessagesByStatus").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetNotificationReportGeneratorLogByTemplate(ctx context.Context, arg uuid.UUID) (database.NotificationReportGeneratorLog, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationReportGeneratorLogByTemplate(ctx, arg) - 
m.queryLatencies.WithLabelValues("GetNotificationReportGeneratorLogByTemplate").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetNotificationTemplateByID(ctx context.Context, id uuid.UUID) (database.NotificationTemplate, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationTemplateByID(ctx, id) - m.queryLatencies.WithLabelValues("GetNotificationTemplateByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetNotificationTemplatesByKind(ctx context.Context, kind database.NotificationTemplateKind) ([]database.NotificationTemplate, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationTemplatesByKind(ctx, kind) - m.queryLatencies.WithLabelValues("GetNotificationTemplatesByKind").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetNotificationsSettings(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationsSettings(ctx) - m.queryLatencies.WithLabelValues("GetNotificationsSettings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppByID(ctx, id) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppCode, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppCodeByID(ctx, id) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppCode, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppCodeByPrefix(ctx, secretPrefix) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByPrefix").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppSecretByID(ctx, id) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppSecretByPrefix(ctx, secretPrefix) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByPrefix").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppSecretsByAppID(ctx, appID) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretsByAppID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPrefix []byte) (database.OAuth2ProviderAppToken, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppTokenByPrefix(ctx, hashPrefix) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppTokenByPrefix").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) 
GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderApps(ctx) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderApps").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetOAuth2ProviderAppsByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppsByUserID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOAuthSigningKey(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetOAuthSigningKey(ctx) - m.queryLatencies.WithLabelValues("GetOAuthSigningKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetOrganizationByID(ctx context.Context, id uuid.UUID) (database.Organization, error) { - start := time.Now() - organization, err := m.s.GetOrganizationByID(ctx, id) - m.queryLatencies.WithLabelValues("GetOrganizationByID").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) { - start := time.Now() - organization, err := m.s.GetOrganizationByName(ctx, name) - m.queryLatencies.WithLabelValues("GetOrganizationByName").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]database.GetOrganizationIDsByMemberIDsRow, error) { - start := time.Now() - organizations, err := m.s.GetOrganizationIDsByMemberIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetOrganizationIDsByMemberIDs").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetOrganizations(ctx context.Context, args database.GetOrganizationsParams) ([]database.Organization, error) { - start := time.Now() - organizations, err := m.s.GetOrganizations(ctx, args) - m.queryLatencies.WithLabelValues("GetOrganizations").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) { - start := time.Now() - organizations, err := m.s.GetOrganizationsByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetOrganizationsByUserID").Observe(time.Since(start).Seconds()) - return organizations, err -} - -func (m metricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { - start := time.Now() - schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetParameterSchemasByJobID").Observe(time.Since(start).Seconds()) - return schemas, err -} - -func (m metricsStore) GetPreviousTemplateVersion(ctx context.Context, arg database.GetPreviousTemplateVersionParams) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetPreviousTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("GetPreviousTemplateVersion").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetProvisionerDaemons(ctx context.Context) ([]database.ProvisionerDaemon, error) { - start := time.Now() - daemons, err := m.s.GetProvisionerDaemons(ctx) - m.queryLatencies.WithLabelValues("GetProvisionerDaemons").Observe(time.Since(start).Seconds()) - 
return daemons, err -} - -func (m metricsStore) GetProvisionerDaemonsByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerDaemon, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerDaemonsByOrganization(ctx, organizationID) - m.queryLatencies.WithLabelValues("GetProvisionerDaemonsByOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { - start := time.Now() - job, err := m.s.GetProvisionerJobByID(ctx, id) - m.queryLatencies.WithLabelValues("GetProvisionerJobByID").Observe(time.Since(start).Seconds()) - return job, err -} - -func (m metricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetProvisionerJobTimingsByJobID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := m.s.GetProvisionerJobsByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDs").Observe(time.Since(start).Seconds()) - return jobs, err -} - -func (m metricsStore) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids) - m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDsWithQueuePosition").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := m.s.GetProvisionerJobsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetProvisionerJobsCreatedAfter").Observe(time.Since(start).Seconds()) - return jobs, err -} - -func (m metricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByHashedSecret").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerKeyByName(ctx context.Context, name database.GetProvisionerKeyByNameParams) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.GetProvisionerKeyByName(ctx, name) - m.queryLatencies.WithLabelValues("GetProvisionerKeyByName").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetProvisionerLogsAfterID(ctx context.Context, arg database.GetProvisionerLogsAfterIDParams) ([]database.ProvisionerJobLog, error) { - start := time.Now() - logs, err := m.s.GetProvisionerLogsAfterID(ctx, arg) - m.queryLatencies.WithLabelValues("GetProvisionerLogsAfterID").Observe(time.Since(start).Seconds()) - return logs, err -} - -func (m metricsStore) GetQuotaAllowanceForUser(ctx context.Context, userID 
database.GetQuotaAllowanceForUserParams) (int64, error) { - start := time.Now() - allowance, err := m.s.GetQuotaAllowanceForUser(ctx, userID) - m.queryLatencies.WithLabelValues("GetQuotaAllowanceForUser").Observe(time.Since(start).Seconds()) - return allowance, err -} - -func (m metricsStore) GetQuotaConsumedForUser(ctx context.Context, ownerID database.GetQuotaConsumedForUserParams) (int64, error) { - start := time.Now() - consumed, err := m.s.GetQuotaConsumedForUser(ctx, ownerID) - m.queryLatencies.WithLabelValues("GetQuotaConsumedForUser").Observe(time.Since(start).Seconds()) - return consumed, err -} - -func (m metricsStore) GetReplicaByID(ctx context.Context, id uuid.UUID) (database.Replica, error) { - start := time.Now() - replica, err := m.s.GetReplicaByID(ctx, id) - m.queryLatencies.WithLabelValues("GetReplicaByID").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.Replica, error) { - start := time.Now() - replicas, err := m.s.GetReplicasUpdatedAfter(ctx, updatedAt) - m.queryLatencies.WithLabelValues("GetReplicasUpdatedAfter").Observe(time.Since(start).Seconds()) - return replicas, err -} - -func (m metricsStore) GetRuntimeConfig(ctx context.Context, key string) (string, error) { - start := time.Now() - r0, r1 := m.s.GetRuntimeConfig(ctx, key) - m.queryLatencies.WithLabelValues("GetRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetAgents(ctx, id) - m.queryLatencies.WithLabelValues("GetTailnetAgents").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]database.TailnetClient, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetClientsForAgent(ctx, agentID) - m.queryLatencies.WithLabelValues("GetTailnetClientsForAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetPeers(ctx, id) - m.queryLatencies.WithLabelValues("GetTailnetPeers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerBindingsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetTunnelPeerBindings(ctx, srcID) - m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerBindings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerIDsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTailnetTunnelPeerIDs(ctx, srcID) - m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateAppInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAppInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) 
([]database.GetTemplateAppInsightsByTemplateRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateAppInsightsByTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAppInsightsByTemplate").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateAverageBuildTime(ctx context.Context, arg database.GetTemplateAverageBuildTimeParams) (database.GetTemplateAverageBuildTimeRow, error) { - start := time.Now() - buildTime, err := m.s.GetTemplateAverageBuildTime(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateAverageBuildTime").Observe(time.Since(start).Seconds()) - return buildTime, err -} - -func (m metricsStore) GetTemplateByID(ctx context.Context, id uuid.UUID) (database.Template, error) { - start := time.Now() - template, err := m.s.GetTemplateByID(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateByID").Observe(time.Since(start).Seconds()) - return template, err -} - -func (m metricsStore) GetTemplateByOrganizationAndName(ctx context.Context, arg database.GetTemplateByOrganizationAndNameParams) (database.Template, error) { - start := time.Now() - template, err := m.s.GetTemplateByOrganizationAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateByOrganizationAndName").Observe(time.Since(start).Seconds()) - return template, err -} - -func (m metricsStore) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateDAUsParams) ([]database.GetTemplateDAUsRow, error) { - start := time.Now() - daus, err := m.s.GetTemplateDAUs(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateDAUs").Observe(time.Since(start).Seconds()) - return daus, err -} - -func (m metricsStore) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsightsByInterval(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsightsByInterval").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateInsightsByTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateInsightsByTemplate").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateParameterInsights(ctx context.Context, arg database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateParameterInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateParameterInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateUsageStats(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateUsageStats").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) 
(database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByID(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateVersionByID").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetTemplateVersionByJobID").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg database.GetTemplateVersionByTemplateIDAndNameParams) (database.TemplateVersion, error) { - start := time.Now() - version, err := m.s.GetTemplateVersionByTemplateIDAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateVersionByTemplateIDAndName").Observe(time.Since(start).Seconds()) - return version, err -} - -func (m metricsStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { - start := time.Now() - parameters, err := m.s.GetTemplateVersionParameters(ctx, templateVersionID) - m.queryLatencies.WithLabelValues("GetTemplateVersionParameters").Observe(time.Since(start).Seconds()) - return parameters, err -} - -func (m metricsStore) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionVariable, error) { - start := time.Now() - variables, err := m.s.GetTemplateVersionVariables(ctx, templateVersionID) - m.queryLatencies.WithLabelValues("GetTemplateVersionVariables").Observe(time.Since(start).Seconds()) - return variables, err -} - -func (m metricsStore) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { - start := time.Now() - r0, r1 := m.s.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) - m.queryLatencies.WithLabelValues("GetTemplateVersionWorkspaceTags").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetTemplateVersionsByIDs").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) GetTemplateVersionsByTemplateID(ctx context.Context, arg database.GetTemplateVersionsByTemplateIDParams) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsByTemplateID(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplateVersionsByTemplateID").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.TemplateVersion, error) { - start := time.Now() - versions, err := m.s.GetTemplateVersionsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetTemplateVersionsCreatedAfter").Observe(time.Since(start).Seconds()) - return versions, err -} - -func (m metricsStore) GetTemplates(ctx context.Context) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetTemplates(ctx) - m.queryLatencies.WithLabelValues("GetTemplates").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetTemplatesWithFilter(ctx context.Context, arg 
database.GetTemplatesWithFilterParams) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetTemplatesWithFilter(ctx, arg) - m.queryLatencies.WithLabelValues("GetTemplatesWithFilter").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { - start := time.Now() - licenses, err := m.s.GetUnexpiredLicenses(ctx) - m.queryLatencies.WithLabelValues("GetUnexpiredLicenses").Observe(time.Since(start).Seconds()) - return licenses, err -} - -func (m metricsStore) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetUserActivityInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserActivityInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserByEmailOrUsername(ctx context.Context, arg database.GetUserByEmailOrUsernameParams) (database.User, error) { - start := time.Now() - user, err := m.s.GetUserByEmailOrUsername(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserByEmailOrUsername").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) GetUserByID(ctx context.Context, id uuid.UUID) (database.User, error) { - start := time.Now() - user, err := m.s.GetUserByID(ctx, id) - m.queryLatencies.WithLabelValues("GetUserByID").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) GetUserCount(ctx context.Context) (int64, error) { - start := time.Now() - count, err := m.s.GetUserCount(ctx) - m.queryLatencies.WithLabelValues("GetUserCount").Observe(time.Since(start).Seconds()) - return count, err -} - -func (m metricsStore) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { - start := time.Now() - r0, r1 := m.s.GetUserLatencyInsights(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserLatencyInsights").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (database.UserLink, error) { - start := time.Now() - link, err := m.s.GetUserLinkByLinkedID(ctx, linkedID) - m.queryLatencies.WithLabelValues("GetUserLinkByLinkedID").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) GetUserLinkByUserIDLoginType(ctx context.Context, arg database.GetUserLinkByUserIDLoginTypeParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.GetUserLinkByUserIDLoginType(ctx, arg) - m.queryLatencies.WithLabelValues("GetUserLinkByUserIDLoginType").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.UserLink, error) { - start := time.Now() - r0, r1 := m.s.GetUserLinksByUserID(ctx, userID) - m.queryLatencies.WithLabelValues("GetUserLinksByUserID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]database.NotificationPreference, error) { - start := time.Now() - r0, r1 := m.s.GetUserNotificationPreferences(ctx, userID) - m.queryLatencies.WithLabelValues("GetUserNotificationPreferences").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUserWorkspaceBuildParameters(ctx context.Context, ownerID 
database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { - start := time.Now() - r0, r1 := m.s.GetUserWorkspaceBuildParameters(ctx, ownerID) - m.queryLatencies.WithLabelValues("GetUserWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetUsers(ctx context.Context, arg database.GetUsersParams) ([]database.GetUsersRow, error) { - start := time.Now() - users, err := m.s.GetUsers(ctx, arg) - m.queryLatencies.WithLabelValues("GetUsers").Observe(time.Since(start).Seconds()) - return users, err -} - -func (m metricsStore) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]database.User, error) { - start := time.Now() - users, err := m.s.GetUsersByIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetUsersByIDs").Observe(time.Since(start).Seconds()) - return users, err -} - -func (m metricsStore) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, authToken) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentAndLatestBuildByAuthToken").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.GetWorkspaceAgentByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentByID").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) GetWorkspaceAgentByInstanceID(ctx context.Context, authInstanceID string) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.GetWorkspaceAgentByInstanceID(ctx, authInstanceID) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentByInstanceID").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) GetWorkspaceAgentLifecycleStateByID(ctx context.Context, id uuid.UUID) (database.GetWorkspaceAgentLifecycleStateByIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentLifecycleStateByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentLogSourcesByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentLogSource, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentLogSourcesByAgentIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogSourcesByAgentIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentLogsAfter(ctx context.Context, arg database.GetWorkspaceAgentLogsAfterParams) ([]database.WorkspaceAgentLog, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentLogsAfter(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogsAfter").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentMetadata(ctx context.Context, workspaceAgentID database.GetWorkspaceAgentMetadataParams) ([]database.WorkspaceAgentMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceAgentMetadata(ctx, workspaceAgentID) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceAgentPortShare(ctx context.Context, arg 
database.GetWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentScriptTimingsByBuildID(ctx context.Context, id uuid.UUID) ([]database.GetWorkspaceAgentScriptTimingsByBuildIDRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentScriptTimingsByBuildID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptTimingsByBuildID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentScriptsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentScript, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentScriptsByAgentIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptsByAgentIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsRow, error) { - start := time.Now() - stats, err := m.s.GetWorkspaceAgentStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return stats, err -} - -func (m metricsStore) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsAndLabelsRow, error) { - start := time.Now() - stats, err := m.s.GetWorkspaceAgentStatsAndLabels(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentStatsAndLabels").Observe(time.Since(start).Seconds()) - return stats, err -} - -func (m metricsStore) GetWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentUsageStats(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentUsageStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsAndLabelsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceAgentUsageStatsAndLabels(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStatsAndLabels").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsByResourceIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentsByResourceIDs").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAgentsCreatedAfter").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { - start := time.Now() - agents, err := m.s.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspaceID) - 
m.queryLatencies.WithLabelValues("GetWorkspaceAgentsInLatestBuildByWorkspaceID").Observe(time.Since(start).Seconds()) - return agents, err -} - -func (m metricsStore) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg database.GetWorkspaceAppByAgentIDAndSlugParams) (database.WorkspaceApp, error) { - start := time.Now() - app, err := m.s.GetWorkspaceAppByAgentIDAndSlug(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceAppByAgentIDAndSlug").Observe(time.Since(start).Seconds()) - return app, err -} - -func (m metricsStore) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsByAgentID(ctx, agentID) - m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentID").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsByAgentIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentIDs").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceApp, error) { - start := time.Now() - apps, err := m.s.GetWorkspaceAppsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceAppsCreatedAfter").Observe(time.Since(start).Seconds()) - return apps, err -} - -func (m metricsStore) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByID").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UUID) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByJobID").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Context, arg database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams) (database.WorkspaceBuild, error) { - start := time.Now() - build, err := m.s.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildByWorkspaceIDAndBuildNumber").Observe(time.Since(start).Seconds()) - return build, err -} - -func (m metricsStore) GetWorkspaceBuildParameters(ctx context.Context, workspaceBuildID uuid.UUID) ([]database.WorkspaceBuildParameter, error) { - start := time.Now() - params, err := m.s.GetWorkspaceBuildParameters(ctx, workspaceBuildID) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return params, err -} - -func (m metricsStore) GetWorkspaceBuildStatsByTemplates(ctx context.Context, since time.Time) ([]database.GetWorkspaceBuildStatsByTemplatesRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceBuildStatsByTemplates(ctx, since) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildStatsByTemplates").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg database.GetWorkspaceBuildsByWorkspaceIDParams) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := 
m.s.GetWorkspaceBuildsByWorkspaceID(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildsByWorkspaceID").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetWorkspaceBuildsCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceBuildsCreatedAfter").Observe(time.Since(start).Seconds()) - return builds, err -} - -func (m metricsStore) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByAgentID(ctx, agentID) - m.queryLatencies.WithLabelValues("GetWorkspaceByAgentID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceByID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database.GetWorkspaceByOwnerIDAndNameParams) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByOwnerIDAndName(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceByOwnerIDAndName").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { - start := time.Now() - workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID) - m.queryLatencies.WithLabelValues("GetWorkspaceByWorkspaceAppID").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) GetWorkspaceProxies(ctx context.Context) ([]database.WorkspaceProxy, error) { - start := time.Now() - proxies, err := m.s.GetWorkspaceProxies(ctx) - m.queryLatencies.WithLabelValues("GetWorkspaceProxies").Observe(time.Since(start).Seconds()) - return proxies, err -} - -func (m metricsStore) GetWorkspaceProxyByHostname(ctx context.Context, arg database.GetWorkspaceProxyByHostnameParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByHostname(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByHostname").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceProxyByID(ctx context.Context, id uuid.UUID) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByID").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceProxyByName(ctx context.Context, name string) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.GetWorkspaceProxyByName(ctx, name) - m.queryLatencies.WithLabelValues("GetWorkspaceProxyByName").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (database.WorkspaceResource, error) { - start := time.Now() - resource, err := m.s.GetWorkspaceResourceByID(ctx, id) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceByID").Observe(time.Since(start).Seconds()) - return resource, err -} - -func (m metricsStore) 
GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceResourceMetadataByResourceIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataByResourceIDs").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.GetWorkspaceResourceMetadataCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataCreatedAfter").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesByJobID(ctx, jobID) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobID").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesByJobIDs(ctx, ids) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobIDs").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResource, error) { - start := time.Now() - resources, err := m.s.GetWorkspaceResourcesCreatedAfter(ctx, createdAt) - m.queryLatencies.WithLabelValues("GetWorkspaceResourcesCreatedAfter").Observe(time.Since(start).Seconds()) - return resources, err -} - -func (m metricsStore) GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx context.Context, templateIds []uuid.UUID) ([]database.GetWorkspaceUniqueOwnerCountByTemplateIDsRow, error) { - start := time.Now() - r0, r1 := m.s.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, templateIds) - m.queryLatencies.WithLabelValues("GetWorkspaceUniqueOwnerCountByTemplateIDs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetWorkspaces(ctx context.Context, arg database.GetWorkspacesParams) ([]database.GetWorkspacesRow, error) { - start := time.Now() - workspaces, err := m.s.GetWorkspaces(ctx, arg) - m.queryLatencies.WithLabelValues("GetWorkspaces").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]database.WorkspaceTable, error) { - start := time.Now() - workspaces, err := m.s.GetWorkspacesEligibleForTransition(ctx, now) - m.queryLatencies.WithLabelValues("GetWorkspacesEligibleForAutoStartStop").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { - start := time.Now() - key, err := m.s.InsertAPIKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertAPIKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (database.Group, error) { - start := time.Now() - group, err := m.s.InsertAllUsersGroup(ctx, organizationID) - m.queryLatencies.WithLabelValues("InsertAllUsersGroup").Observe(time.Since(start).Seconds()) - return group, err -} 
- -func (m metricsStore) InsertAuditLog(ctx context.Context, arg database.InsertAuditLogParams) (database.AuditLog, error) { - start := time.Now() - log, err := m.s.InsertAuditLog(ctx, arg) - m.queryLatencies.WithLabelValues("InsertAuditLog").Observe(time.Since(start).Seconds()) - return log, err -} - -func (m metricsStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) { - start := time.Now() - key, err := m.s.InsertCryptoKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertCryptoKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertCustomRole(ctx context.Context, arg database.InsertCustomRoleParams) (database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.InsertCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("InsertCustomRole").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertDBCryptKey(ctx context.Context, arg database.InsertDBCryptKeyParams) error { - start := time.Now() - r0 := m.s.InsertDBCryptKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertDBCryptKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertDERPMeshKey(ctx context.Context, value string) error { - start := time.Now() - err := m.s.InsertDERPMeshKey(ctx, value) - m.queryLatencies.WithLabelValues("InsertDERPMeshKey").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertDeploymentID(ctx context.Context, value string) error { - start := time.Now() - err := m.s.InsertDeploymentID(ctx, value) - m.queryLatencies.WithLabelValues("InsertDeploymentID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertExternalAuthLink(ctx context.Context, arg database.InsertExternalAuthLinkParams) (database.ExternalAuthLink, error) { - start := time.Now() - link, err := m.s.InsertExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("InsertExternalAuthLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) InsertFile(ctx context.Context, arg database.InsertFileParams) (database.File, error) { - start := time.Now() - file, err := m.s.InsertFile(ctx, arg) - m.queryLatencies.WithLabelValues("InsertFile").Observe(time.Since(start).Seconds()) - return file, err -} - -func (m metricsStore) InsertGitSSHKey(ctx context.Context, arg database.InsertGitSSHKeyParams) (database.GitSSHKey, error) { - start := time.Now() - key, err := m.s.InsertGitSSHKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGitSSHKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) InsertGroup(ctx context.Context, arg database.InsertGroupParams) (database.Group, error) { - start := time.Now() - group, err := m.s.InsertGroup(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGroup").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) InsertGroupMember(ctx context.Context, arg database.InsertGroupMemberParams) error { - start := time.Now() - err := m.s.InsertGroupMember(ctx, arg) - m.queryLatencies.WithLabelValues("InsertGroupMember").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) { - start := time.Now() - license, err := m.s.InsertLicense(ctx, arg) - m.queryLatencies.WithLabelValues("InsertLicense").Observe(time.Since(start).Seconds()) - return license, err -} - -func (m metricsStore) 
InsertMissingGroups(ctx context.Context, arg database.InsertMissingGroupsParams) ([]database.Group, error) { - start := time.Now() - r0, r1 := m.s.InsertMissingGroups(ctx, arg) - m.queryLatencies.WithLabelValues("InsertMissingGroups").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderApp(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderApp").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderAppCode(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppCode").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderAppSecret(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppSecret").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOAuth2ProviderAppToken(ctx context.Context, arg database.InsertOAuth2ProviderAppTokenParams) (database.OAuth2ProviderAppToken, error) { - start := time.Now() - r0, r1 := m.s.InsertOAuth2ProviderAppToken(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppToken").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertOrganization(ctx context.Context, arg database.InsertOrganizationParams) (database.Organization, error) { - start := time.Now() - organization, err := m.s.InsertOrganization(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOrganization").Observe(time.Since(start).Seconds()) - return organization, err -} - -func (m metricsStore) InsertOrganizationMember(ctx context.Context, arg database.InsertOrganizationMemberParams) (database.OrganizationMember, error) { - start := time.Now() - member, err := m.s.InsertOrganizationMember(ctx, arg) - m.queryLatencies.WithLabelValues("InsertOrganizationMember").Observe(time.Since(start).Seconds()) - return member, err -} - -func (m metricsStore) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { - start := time.Now() - job, err := m.s.InsertProvisionerJob(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJob").Observe(time.Since(start).Seconds()) - return job, err -} - -func (m metricsStore) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) { - start := time.Now() - logs, err := m.s.InsertProvisionerJobLogs(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJobLogs").Observe(time.Since(start).Seconds()) - return logs, err -} - -func (m metricsStore) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { - start := time.Now() - r0, r1 := m.s.InsertProvisionerJobTimings(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerJobTimings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertProvisionerKey(ctx context.Context, arg 
database.InsertProvisionerKeyParams) (database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.InsertProvisionerKey(ctx, arg) - m.queryLatencies.WithLabelValues("InsertProvisionerKey").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) { - start := time.Now() - replica, err := m.s.InsertReplica(ctx, arg) - m.queryLatencies.WithLabelValues("InsertReplica").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { - start := time.Now() - err := m.s.InsertTemplate(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplate").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertTemplateVersion(ctx context.Context, arg database.InsertTemplateVersionParams) error { - start := time.Now() - err := m.s.InsertTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersion").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertTemplateVersionParameter(ctx context.Context, arg database.InsertTemplateVersionParameterParams) (database.TemplateVersionParameter, error) { - start := time.Now() - parameter, err := m.s.InsertTemplateVersionParameter(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionParameter").Observe(time.Since(start).Seconds()) - return parameter, err -} - -func (m metricsStore) InsertTemplateVersionVariable(ctx context.Context, arg database.InsertTemplateVersionVariableParams) (database.TemplateVersionVariable, error) { - start := time.Now() - variable, err := m.s.InsertTemplateVersionVariable(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionVariable").Observe(time.Since(start).Seconds()) - return variable, err -} - -func (m metricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { - start := time.Now() - r0, r1 := m.s.InsertTemplateVersionWorkspaceTag(ctx, arg) - m.queryLatencies.WithLabelValues("InsertTemplateVersionWorkspaceTag").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { - start := time.Now() - user, err := m.s.InsertUser(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUser").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) InsertUserGroupsByID(ctx context.Context, arg database.InsertUserGroupsByIDParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.InsertUserGroupsByID(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserGroupsByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertUserGroupsByName(ctx context.Context, arg database.InsertUserGroupsByNameParams) error { - start := time.Now() - err := m.s.InsertUserGroupsByName(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserGroupsByName").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertUserLink(ctx context.Context, arg database.InsertUserLinkParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.InsertUserLink(ctx, arg) - m.queryLatencies.WithLabelValues("InsertUserLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) InsertWorkspace(ctx 
context.Context, arg database.InsertWorkspaceParams) (database.WorkspaceTable, error) { - start := time.Now() - workspace, err := m.s.InsertWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspace").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) { - start := time.Now() - agent, err := m.s.InsertWorkspaceAgent(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgent").Observe(time.Since(start).Seconds()) - return agent, err -} - -func (m metricsStore) InsertWorkspaceAgentLogSources(ctx context.Context, arg database.InsertWorkspaceAgentLogSourcesParams) ([]database.WorkspaceAgentLogSource, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentLogSources(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogSources").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentLogs(ctx context.Context, arg database.InsertWorkspaceAgentLogsParams) ([]database.WorkspaceAgentLog, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentLogs(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentMetadata(ctx context.Context, arg database.InsertWorkspaceAgentMetadataParams) error { - start := time.Now() - err := m.s.InsertWorkspaceAgentMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertWorkspaceAgentScriptTimings(ctx context.Context, arg database.InsertWorkspaceAgentScriptTimingsParams) (database.WorkspaceAgentScriptTiming, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentScriptTimings(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScriptTimings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentScripts(ctx context.Context, arg database.InsertWorkspaceAgentScriptsParams) ([]database.WorkspaceAgentScript, error) { - start := time.Now() - r0, r1 := m.s.InsertWorkspaceAgentScripts(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScripts").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) InsertWorkspaceAgentStats(ctx context.Context, arg database.InsertWorkspaceAgentStatsParams) error { - start := time.Now() - r0 := m.s.InsertWorkspaceAgentStats(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAgentStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertWorkspaceApp(ctx context.Context, arg database.InsertWorkspaceAppParams) (database.WorkspaceApp, error) { - start := time.Now() - app, err := m.s.InsertWorkspaceApp(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceApp").Observe(time.Since(start).Seconds()) - return app, err -} - -func (m metricsStore) InsertWorkspaceAppStats(ctx context.Context, arg database.InsertWorkspaceAppStatsParams) error { - start := time.Now() - r0 := m.s.InsertWorkspaceAppStats(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceAppStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) InsertWorkspaceBuild(ctx context.Context, arg database.InsertWorkspaceBuildParams) error { - start := time.Now() - err := m.s.InsertWorkspaceBuild(ctx, arg) - 
m.queryLatencies.WithLabelValues("InsertWorkspaceBuild").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertWorkspaceBuildParameters(ctx context.Context, arg database.InsertWorkspaceBuildParametersParams) error { - start := time.Now() - err := m.s.InsertWorkspaceBuildParameters(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) InsertWorkspaceProxy(ctx context.Context, arg database.InsertWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.InsertWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) InsertWorkspaceResource(ctx context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) { - start := time.Now() - resource, err := m.s.InsertWorkspaceResource(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceResource").Observe(time.Since(start).Seconds()) - return resource, err -} - -func (m metricsStore) InsertWorkspaceResourceMetadata(ctx context.Context, arg database.InsertWorkspaceResourceMetadataParams) ([]database.WorkspaceResourceMetadatum, error) { - start := time.Now() - metadata, err := m.s.InsertWorkspaceResourceMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("InsertWorkspaceResourceMetadata").Observe(time.Since(start).Seconds()) - return metadata, err -} - -func (m metricsStore) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.ListProvisionerKeysByOrganization(ctx, organizationID) - m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { - start := time.Now() - r0, r1 := m.s.ListProvisionerKeysByOrganizationExcludeReserved(ctx, organizationID) - m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganizationExcludeReserved").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.ListWorkspaceAgentPortShares(ctx, workspaceID) - m.queryLatencies.WithLabelValues("ListWorkspaceAgentPortShares").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) OrganizationMembers(ctx context.Context, arg database.OrganizationMembersParams) ([]database.OrganizationMembersRow, error) { - start := time.Now() - r0, r1 := m.s.OrganizationMembers(ctx, arg) - m.queryLatencies.WithLabelValues("OrganizationMembers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx context.Context, templateID uuid.UUID) error { - start := time.Now() - r0 := m.s.ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx, templateID) - m.queryLatencies.WithLabelValues("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) RegisterWorkspaceProxy(ctx context.Context, arg database.RegisterWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, 
err := m.s.RegisterWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("RegisterWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error { - start := time.Now() - r0 := m.s.RemoveUserFromAllGroups(ctx, userID) - m.queryLatencies.WithLabelValues("RemoveUserFromAllGroups").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) RemoveUserFromGroups(ctx context.Context, arg database.RemoveUserFromGroupsParams) ([]uuid.UUID, error) { - start := time.Now() - r0, r1 := m.s.RemoveUserFromGroups(ctx, arg) - m.queryLatencies.WithLabelValues("RemoveUserFromGroups").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error { - start := time.Now() - r0 := m.s.RevokeDBCryptKey(ctx, activeKeyDigest) - m.queryLatencies.WithLabelValues("RevokeDBCryptKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) { - start := time.Now() - ok, err := m.s.TryAcquireLock(ctx, pgTryAdvisoryXactLock) - m.queryLatencies.WithLabelValues("TryAcquireLock").Observe(time.Since(start).Seconds()) - return ok, err -} - -func (m metricsStore) UnarchiveTemplateVersion(ctx context.Context, arg database.UnarchiveTemplateVersionParams) error { - start := time.Now() - r0 := m.s.UnarchiveTemplateVersion(ctx, arg) - m.queryLatencies.WithLabelValues("UnarchiveTemplateVersion").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UnfavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { - start := time.Now() - r0 := m.s.UnfavoriteWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("UnfavoriteWorkspace").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { - start := time.Now() - err := m.s.UpdateAPIKeyByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateAPIKeyByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) { - start := time.Now() - key, err := m.s.UpdateCryptoKeyDeletesAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateCryptoKeyDeletesAt").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) UpdateCustomRole(ctx context.Context, arg database.UpdateCustomRoleParams) (database.CustomRole, error) { - start := time.Now() - r0, r1 := m.s.UpdateCustomRole(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateCustomRole").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateExternalAuthLink(ctx context.Context, arg database.UpdateExternalAuthLinkParams) (database.ExternalAuthLink, error) { - start := time.Now() - link, err := m.s.UpdateExternalAuthLink(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateExternalAuthLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) { - start := time.Now() - key, err := m.s.UpdateGitSSHKey(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateGitSSHKey").Observe(time.Since(start).Seconds()) - return key, err -} - -func (m metricsStore) UpdateGroupByID(ctx context.Context, arg 
database.UpdateGroupByIDParams) (database.Group, error) { - start := time.Now() - group, err := m.s.UpdateGroupByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateGroupByID").Observe(time.Since(start).Seconds()) - return group, err -} - -func (m metricsStore) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfter database.UpdateInactiveUsersToDormantParams) ([]database.UpdateInactiveUsersToDormantRow, error) { - start := time.Now() - r0, r1 := m.s.UpdateInactiveUsersToDormant(ctx, lastSeenAfter) - m.queryLatencies.WithLabelValues("UpdateInactiveUsersToDormant").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) { - start := time.Now() - member, err := m.s.UpdateMemberRoles(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateMemberRoles").Observe(time.Since(start).Seconds()) - return member, err -} - -func (m metricsStore) UpdateNotificationTemplateMethodByID(ctx context.Context, arg database.UpdateNotificationTemplateMethodByIDParams) (database.NotificationTemplate, error) { - start := time.Now() - r0, r1 := m.s.UpdateNotificationTemplateMethodByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateNotificationTemplateMethodByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) { - start := time.Now() - r0, r1 := m.s.UpdateOAuth2ProviderAppByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) { - start := time.Now() - r0, r1 := m.s.UpdateOAuth2ProviderAppSecretByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { - start := time.Now() - r0, r1 := m.s.UpdateOrganization(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateOrganization").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { - start := time.Now() - r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerDaemonLastSeenAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobWithCancelByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCancelByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg 
database.UpdateProvisionerJobWithCompleteByIDParams) error { - start := time.Now() - err := m.s.UpdateProvisionerJobWithCompleteByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { - start := time.Now() - replica, err := m.s.UpdateReplica(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateReplica").Observe(time.Since(start).Seconds()) - return replica, err -} - -func (m metricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg database.UpdateTailnetPeerStatusByCoordinatorParams) error { - start := time.Now() - r0 := m.s.UpdateTailnetPeerStatusByCoordinator(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTailnetPeerStatusByCoordinator").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateACLByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateACLByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateAccessControlByID(ctx context.Context, arg database.UpdateTemplateAccessControlByIDParams) error { - start := time.Now() - r0 := m.s.UpdateTemplateAccessControlByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateAccessControlByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateTemplateActiveVersionByID(ctx context.Context, arg database.UpdateTemplateActiveVersionByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateActiveVersionByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateActiveVersionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateDeletedByID(ctx context.Context, arg database.UpdateTemplateDeletedByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateDeletedByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateDeletedByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateMetaByID(ctx context.Context, arg database.UpdateTemplateMetaByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateMetaByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateMetaByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateScheduleByID(ctx context.Context, arg database.UpdateTemplateScheduleByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateScheduleByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateScheduleByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg database.UpdateTemplateVersionDescriptionByJobIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionDescriptionByJobID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionDescriptionByJobID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) 
UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { - start := time.Now() - err := m.s.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionExternalAuthProvidersByJobID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { - start := time.Now() - r0 := m.s.UpdateTemplateWorkspacesLastUsedAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateWorkspacesLastUsedAt").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserAppearanceSettings(ctx context.Context, arg database.UpdateUserAppearanceSettingsParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserAppearanceSettings(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserAppearanceSettings").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { - start := time.Now() - r0 := m.s.UpdateUserDeletedByID(ctx, id) - m.queryLatencies.WithLabelValues("UpdateUserDeletedByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserGithubComUserID(ctx context.Context, arg database.UpdateUserGithubComUserIDParams) error { - start := time.Now() - r0 := m.s.UpdateUserGithubComUserID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserGithubComUserID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserHashedOneTimePasscode(ctx context.Context, arg database.UpdateUserHashedOneTimePasscodeParams) error { - start := time.Now() - r0 := m.s.UpdateUserHashedOneTimePasscode(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserHashedOneTimePasscode").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateUserHashedPassword(ctx context.Context, arg database.UpdateUserHashedPasswordParams) error { - start := time.Now() - err := m.s.UpdateUserHashedPassword(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserHashedPassword").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateUserLastSeenAt(ctx context.Context, arg database.UpdateUserLastSeenAtParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserLastSeenAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLastSeenAt").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserLink(ctx context.Context, arg database.UpdateUserLinkParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.UpdateUserLink(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLink").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { - start := time.Now() - link, err := m.s.UpdateUserLinkedID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLinkedID").Observe(time.Since(start).Seconds()) - return link, err -} - -func (m metricsStore) UpdateUserLoginType(ctx context.Context, arg database.UpdateUserLoginTypeParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserLoginType(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserLoginType").Observe(time.Since(start).Seconds()) - return 
r0, r1 -} - -func (m metricsStore) UpdateUserNotificationPreferences(ctx context.Context, arg database.UpdateUserNotificationPreferencesParams) (int64, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserNotificationPreferences(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserNotificationPreferences").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserProfile(ctx context.Context, arg database.UpdateUserProfileParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserProfile(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserProfile").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserQuietHoursSchedule(ctx context.Context, arg database.UpdateUserQuietHoursScheduleParams) (database.User, error) { - start := time.Now() - r0, r1 := m.s.UpdateUserQuietHoursSchedule(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserQuietHoursSchedule").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRolesParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserRoles(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserRoles").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { - start := time.Now() - user, err := m.s.UpdateUserStatus(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateUserStatus").Observe(time.Since(start).Seconds()) - return user, err -} - -func (m metricsStore) UpdateWorkspace(ctx context.Context, arg database.UpdateWorkspaceParams) (database.WorkspaceTable, error) { - start := time.Now() - workspace, err := m.s.UpdateWorkspace(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspace").Observe(time.Since(start).Seconds()) - return workspace, err -} - -func (m metricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAgentConnectionByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentConnectionByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg database.UpdateWorkspaceAgentLifecycleStateByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceAgentLifecycleStateByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg database.UpdateWorkspaceAgentLogOverflowByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceAgentLogOverflowByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLogOverflowByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAgentMetadata(ctx context.Context, arg database.UpdateWorkspaceAgentMetadataParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAgentMetadata(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg database.UpdateWorkspaceAgentStartupByIDParams) error { - start := time.Now() - err := 
m.s.UpdateWorkspaceAgentStartupByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentStartupByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAppHealthByID(ctx context.Context, arg database.UpdateWorkspaceAppHealthByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAppHealthByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAppHealthByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg database.UpdateWorkspaceAutomaticUpdatesParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceAutomaticUpdates(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAutomaticUpdates").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceAutostart(ctx context.Context, arg database.UpdateWorkspaceAutostartParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceAutostart(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceAutostart").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceBuildCostByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildCostByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceBuildDeadlineByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildDeadlineByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildProvisionerStateByID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceDeletedByID(ctx context.Context, arg database.UpdateWorkspaceDeletedByIDParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceDeletedByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceDeletedByID").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg database.UpdateWorkspaceDormantDeletingAtParams) (database.WorkspaceTable, error) { - start := time.Now() - ws, r0 := m.s.UpdateWorkspaceDormantDeletingAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceDormantDeletingAt").Observe(time.Since(start).Seconds()) - return ws, r0 -} - -func (m metricsStore) UpdateWorkspaceLastUsedAt(ctx context.Context, arg database.UpdateWorkspaceLastUsedAtParams) error { - start := time.Now() - err := m.s.UpdateWorkspaceLastUsedAt(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) - return err -} - -func (m metricsStore) UpdateWorkspaceProxy(ctx context.Context, arg database.UpdateWorkspaceProxyParams) (database.WorkspaceProxy, error) { - start := time.Now() - proxy, err := m.s.UpdateWorkspaceProxy(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceProxy").Observe(time.Since(start).Seconds()) - return proxy, err -} - -func (m metricsStore) 
UpdateWorkspaceProxyDeleted(ctx context.Context, arg database.UpdateWorkspaceProxyDeletedParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceProxyDeleted(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceProxyDeleted").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspaceTTL(ctx context.Context, arg database.UpdateWorkspaceTTLParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceTTL(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceTTL").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams) ([]database.WorkspaceTable, error) { - start := time.Now() - r0, r1 := m.s.UpdateWorkspacesDormantDeletingAtByTemplateID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspacesDormantDeletingAtByTemplateID").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertAnnouncementBanners(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertAnnouncementBanners(ctx, value) - m.queryLatencies.WithLabelValues("UpsertAnnouncementBanners").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertAppSecurityKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertAppSecurityKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertAppSecurityKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertApplicationName(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertApplicationName(ctx, value) - m.queryLatencies.WithLabelValues("UpsertApplicationName").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertCoordinatorResumeTokenSigningKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { - start := time.Now() - r0 := m.s.UpsertDefaultProxy(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertDefaultProxy").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertHealthSettings(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertHealthSettings(ctx, value) - m.queryLatencies.WithLabelValues("UpsertHealthSettings").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error { - start := time.Now() - r0 := m.s.UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertLastUpdateCheck(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertLastUpdateCheck(ctx, value) - m.queryLatencies.WithLabelValues("UpsertLastUpdateCheck").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertLogoURL(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertLogoURL(ctx, value) - 
m.queryLatencies.WithLabelValues("UpsertLogoURL").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error { - start := time.Now() - r0 := m.s.UpsertNotificationReportGeneratorLog(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertNotificationReportGeneratorLog").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertNotificationsSettings(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertNotificationsSettings(ctx, value) - m.queryLatencies.WithLabelValues("UpsertNotificationsSettings").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertOAuthSigningKey(ctx, value) - m.queryLatencies.WithLabelValues("UpsertOAuthSigningKey").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertProvisionerDaemon(ctx context.Context, arg database.UpsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) { - start := time.Now() - r0, r1 := m.s.UpsertProvisionerDaemon(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertProvisionerDaemon").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertRuntimeConfig(ctx context.Context, arg database.UpsertRuntimeConfigParams) error { - start := time.Now() - r0 := m.s.UpsertRuntimeConfig(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertRuntimeConfig").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetAgent(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetAgent").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetClient(ctx context.Context, arg database.UpsertTailnetClientParams) (database.TailnetClient, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetClient(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetClient").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetClientSubscription(ctx context.Context, arg database.UpsertTailnetClientSubscriptionParams) error { - start := time.Now() - r0 := m.s.UpsertTailnetClientSubscription(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetClientSubscription").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (database.TailnetCoordinator, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetCoordinator(ctx, id) - m.queryLatencies.WithLabelValues("UpsertTailnetCoordinator").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetPeer(ctx context.Context, arg database.UpsertTailnetPeerParams) (database.TailnetPeer, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetPeer(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetPeer").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTailnetTunnelParams) (database.TailnetTunnel, error) { - start := time.Now() - r0, r1 := m.s.UpsertTailnetTunnel(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertTailnetTunnel").Observe(time.Since(start).Seconds()) - 
return r0, r1 -} - -func (m metricsStore) UpsertTemplateUsageStats(ctx context.Context) error { - start := time.Now() - r0 := m.s.UpsertTemplateUsageStats(ctx) - m.queryLatencies.WithLabelValues("UpsertTemplateUsageStats").Observe(time.Since(start).Seconds()) - return r0 -} - -func (m metricsStore) UpsertWorkspaceAgentPortShare(ctx context.Context, arg database.UpsertWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { - start := time.Now() - r0, r1 := m.s.UpsertWorkspaceAgentPortShare(ctx, arg) - m.queryLatencies.WithLabelValues("UpsertWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, prepared rbac.PreparedAuthorized) ([]database.Template, error) { - start := time.Now() - templates, err := m.s.GetAuthorizedTemplates(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedTemplates").Observe(time.Since(start).Seconds()) - return templates, err -} - -func (m metricsStore) GetTemplateGroupRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateGroup, error) { - start := time.Now() - roles, err := m.s.GetTemplateGroupRoles(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateGroupRoles").Observe(time.Since(start).Seconds()) - return roles, err -} - -func (m metricsStore) GetTemplateUserRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateUser, error) { - start := time.Now() - roles, err := m.s.GetTemplateUserRoles(ctx, id) - m.queryLatencies.WithLabelValues("GetTemplateUserRoles").Observe(time.Since(start).Seconds()) - return roles, err -} - -func (m metricsStore) GetAuthorizedWorkspaces(ctx context.Context, arg database.GetWorkspacesParams, prepared rbac.PreparedAuthorized) ([]database.GetWorkspacesRow, error) { - start := time.Now() - workspaces, err := m.s.GetAuthorizedWorkspaces(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedWorkspaces").Observe(time.Since(start).Seconds()) - return workspaces, err -} - -func (m metricsStore) GetAuthorizedUsers(ctx context.Context, arg database.GetUsersParams, prepared rbac.PreparedAuthorized) ([]database.GetUsersRow, error) { - start := time.Now() - r0, r1 := m.s.GetAuthorizedUsers(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedUsers").Observe(time.Since(start).Seconds()) - return r0, r1 -} - -func (m metricsStore) GetAuthorizedAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams, prepared rbac.PreparedAuthorized) ([]database.GetAuditLogsOffsetRow, error) { - start := time.Now() - r0, r1 := m.s.GetAuthorizedAuditLogsOffset(ctx, arg, prepared) - m.queryLatencies.WithLabelValues("GetAuthorizedAuditLogsOffset").Observe(time.Since(start).Seconds()) - return r0, r1 -} diff --git a/coderd/database/dbmetrics/dbmetrics_test.go b/coderd/database/dbmetrics/dbmetrics_test.go new file mode 100644 index 0000000000000..2b8d2979b1cfe --- /dev/null +++ b/coderd/database/dbmetrics/dbmetrics_test.go @@ -0,0 +1,109 @@ +package dbmetrics_test + +import ( + "bytes" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/coderdtest/promhelp" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbmem" + "github.com/coder/coder/v2/coderd/database/dbmetrics" +) + +func TestInTxMetrics(t 
*testing.T) {
+	t.Parallel()
+
+	successLabels := prometheus.Labels{
+		"success": "true",
+		"id":      "",
+	}
+	const inTxHistMetricName = "coderd_db_tx_duration_seconds"
+	const inTxCountMetricName = "coderd_db_tx_executions_count"
+	t.Run("QueryMetrics", func(t *testing.T) {
+		t.Parallel()
+
+		db := dbmem.New()
+		reg := prometheus.NewRegistry()
+		db = dbmetrics.NewQueryMetrics(db, slogtest.Make(t, nil), reg)
+
+		err := db.InTx(func(s database.Store) error {
+			return nil
+		}, nil)
+		require.NoError(t, err)
+
+		// Check that the metrics are registered
+		inTxMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, successLabels)
+		require.NotNil(t, inTxMetric)
+		require.Equal(t, uint64(1), inTxMetric.GetSampleCount())
+	})
+
+	t.Run("DBMetrics", func(t *testing.T) {
+		t.Parallel()
+
+		db := dbmem.New()
+		reg := prometheus.NewRegistry()
+		db = dbmetrics.NewDBMetrics(db, slogtest.Make(t, nil), reg)
+
+		err := db.InTx(func(s database.Store) error {
+			return nil
+		}, nil)
+		require.NoError(t, err)
+
+		// Check that the metrics are registered
+		inTxMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, successLabels)
+		require.NotNil(t, inTxMetric)
+		require.Equal(t, uint64(1), inTxMetric.GetSampleCount())
+	})
+
+	// Test log output and metrics on failures
+	// Log example:
+	// [erro] database transaction hit serialization error and had to retry success=false executions=2 id=foobar_factory
+	t.Run("SerializationError", func(t *testing.T) {
+		t.Parallel()
+
+		var output bytes.Buffer
+		logger := slog.Make(sloghuman.Sink(&output))
+
+		reg := prometheus.NewRegistry()
+		db := dbmetrics.NewDBMetrics(dbmem.New(), logger, reg)
+		const id = "foobar_factory"
+
+		txOpts := database.DefaultTXOptions().WithID(id)
+		database.IncrementExecutionCount(txOpts) // 2 executions
+
+		err := db.InTx(func(s database.Store) error {
+			return xerrors.Errorf("some dumb error")
+		}, txOpts)
+		require.Error(t, err)
+
+		// Check that the metrics are registered
+		inTxHistMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, prometheus.Labels{
+			"success": "false",
+			"id":      id,
+		})
+		require.NotNil(t, inTxHistMetric)
+		require.Equal(t, uint64(1), inTxHistMetric.GetSampleCount())
+
+		inTxCountMetric := promhelp.CounterValue(t, reg, inTxCountMetricName, prometheus.Labels{
+			"success": "false",
+			"retries": "1",
+			"id":      id,
+		})
+		require.NotNil(t, inTxCountMetric)
+		require.Equal(t, 1, inTxCountMetric)
+
+		// Also check the logs
+		require.Contains(t, output.String(), "some dumb error")
+		require.Contains(t, output.String(), "database transaction hit serialization error and had to retry")
+		require.Contains(t, output.String(), "success=false")
+		require.Contains(t, output.String(), "executions=2")
+		require.Contains(t, output.String(), "id="+id)
+	})
+}
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
new file mode 100644
index 0000000000000..7e74aab3b9de0
--- /dev/null
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -0,0 +1,2710 @@
+// Code generated by coderd/database/gen/metrics.
+// Any function can be edited and will not be overwritten.
+// New database functions are automatically generated!
+package dbmetrics
+
+import (
+	"context"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/prometheus/client_golang/prometheus"
+	"golang.org/x/exp/slices"
+
+	"cdr.dev/slog"
+	"github.com/coder/coder/v2/coderd/database"
+	"github.com/coder/coder/v2/coderd/rbac"
+	"github.com/coder/coder/v2/coderd/rbac/policy"
+)
+
+var (
+	// Force these imports, for some reason the autogen does not include them.
+	_ uuid.UUID
+	_ policy.Action
+	_ rbac.Objecter
+)
+
+const wrapname = "dbmetrics.metricsStore"
+
+// NewQueryMetrics returns a database.Store that registers metrics for all queries to reg.
+func NewQueryMetrics(s database.Store, logger slog.Logger, reg prometheus.Registerer) database.Store {
+	// Don't double-wrap.
+	if slices.Contains(s.Wrappers(), wrapname) {
+		return s
+	}
+	queryLatencies := prometheus.NewHistogramVec(prometheus.HistogramOpts{
+		Namespace: "coderd",
+		Subsystem: "db",
+		Name:      "query_latencies_seconds",
+		Help:      "Latency distribution of queries in seconds.",
+		Buckets:   prometheus.DefBuckets,
+	}, []string{"query"})
+	reg.MustRegister(queryLatencies)
+	return &queryMetricsStore{
+		s:              s,
+		queryLatencies: queryLatencies,
+		dbMetrics:      NewDBMetrics(s, logger, reg).(*metricsStore),
+	}
+}
+
+var _ database.Store = (*queryMetricsStore)(nil)
+
+type queryMetricsStore struct {
+	s              database.Store
+	queryLatencies *prometheus.HistogramVec
+	dbMetrics      *metricsStore
+}
+
+func (m queryMetricsStore) Wrappers() []string {
+	return append(m.s.Wrappers(), wrapname)
+}
+
+func (m queryMetricsStore) Ping(ctx context.Context) (time.Duration, error) {
+	start := time.Now()
+	duration, err := m.s.Ping(ctx)
+	m.queryLatencies.WithLabelValues("Ping").Observe(time.Since(start).Seconds())
+	return duration, err
+}
+
+func (m queryMetricsStore) InTx(f func(database.Store) error, options *database.TxOptions) error {
+	return m.dbMetrics.InTx(f, options)
+}
+
+func (m queryMetricsStore) AcquireLock(ctx context.Context, pgAdvisoryXactLock int64) error {
+	start := time.Now()
+	err := m.s.AcquireLock(ctx, pgAdvisoryXactLock)
+	m.queryLatencies.WithLabelValues("AcquireLock").Observe(time.Since(start).Seconds())
+	return err
+}
+
+func (m queryMetricsStore) AcquireNotificationMessages(ctx context.Context, arg database.AcquireNotificationMessagesParams) ([]database.AcquireNotificationMessagesRow, error) {
+	start := time.Now()
+	r0, r1 := m.s.AcquireNotificationMessages(ctx, arg)
+	m.queryLatencies.WithLabelValues("AcquireNotificationMessages").Observe(time.Since(start).Seconds())
+	return r0, r1
+}
+
+func (m queryMetricsStore) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) {
+	start := time.Now()
+	provisionerJob, err := m.s.AcquireProvisionerJob(ctx, arg)
+	m.queryLatencies.WithLabelValues("AcquireProvisionerJob").Observe(time.Since(start).Seconds())
+	return provisionerJob, err
+}
+
+func (m queryMetricsStore) ActivityBumpWorkspace(ctx context.Context, arg database.ActivityBumpWorkspaceParams) error {
+	start := time.Now()
+	r0 := m.s.ActivityBumpWorkspace(ctx, arg)
+	m.queryLatencies.WithLabelValues("ActivityBumpWorkspace").Observe(time.Since(start).Seconds())
+	return r0
+}
+
+func (m queryMetricsStore) AllUserIDs(ctx context.Context) ([]uuid.UUID, error) {
+	start := time.Now()
+	r0, r1 := m.s.AllUserIDs(ctx)
+	m.queryLatencies.WithLabelValues("AllUserIDs").Observe(time.Since(start).Seconds())
+	return r0, r1
+}
+
+func (m queryMetricsStore) ArchiveUnusedTemplateVersions(ctx context.Context, arg
database.ArchiveUnusedTemplateVersionsParams) ([]uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.ArchiveUnusedTemplateVersions(ctx, arg) + m.queryLatencies.WithLabelValues("ArchiveUnusedTemplateVersions").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { + start := time.Now() + r0 := m.s.BatchUpdateWorkspaceLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("BatchUpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) BulkMarkNotificationMessagesFailed(ctx context.Context, arg database.BulkMarkNotificationMessagesFailedParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.BulkMarkNotificationMessagesFailed(ctx, arg) + m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesFailed").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) BulkMarkNotificationMessagesSent(ctx context.Context, arg database.BulkMarkNotificationMessagesSentParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.BulkMarkNotificationMessagesSent(ctx, arg) + m.queryLatencies.WithLabelValues("BulkMarkNotificationMessagesSent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) CleanTailnetCoordinators(ctx context.Context) error { + start := time.Now() + err := m.s.CleanTailnetCoordinators(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetCoordinators").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) CleanTailnetLostPeers(ctx context.Context) error { + start := time.Now() + r0 := m.s.CleanTailnetLostPeers(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetLostPeers").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) CleanTailnetTunnels(ctx context.Context) error { + start := time.Now() + r0 := m.s.CleanTailnetTunnels(ctx) + m.queryLatencies.WithLabelValues("CleanTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.CustomRoles(ctx, arg) + m.queryLatencies.WithLabelValues("CustomRoles").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteAPIKeyByID(ctx context.Context, id string) error { + start := time.Now() + err := m.s.DeleteAPIKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteAPIKeyByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteAPIKeysByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteAllTailnetClientSubscriptions(ctx context.Context, arg database.DeleteAllTailnetClientSubscriptionsParams) error { + start := time.Now() + r0 := m.s.DeleteAllTailnetClientSubscriptions(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteAllTailnetClientSubscriptions").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteAllTailnetTunnels(ctx context.Context, arg database.DeleteAllTailnetTunnelsParams) error { + start := time.Now() + r0 := m.s.DeleteAllTailnetTunnels(ctx, arg) + 
m.queryLatencies.WithLabelValues("DeleteAllTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteApplicationConnectAPIKeysByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteApplicationConnectAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteCoordinator(ctx, id) + m.queryLatencies.WithLabelValues("DeleteCoordinator").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteCryptoKey(ctx context.Context, arg database.DeleteCryptoKeyParams) (database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.DeleteCryptoKey(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteCryptoKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteCustomRole(ctx context.Context, arg database.DeleteCustomRoleParams) error { + start := time.Now() + r0 := m.s.DeleteCustomRole(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteCustomRole").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { + start := time.Now() + r0 := m.s.DeleteExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteExternalAuthLink").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + err := m.s.DeleteGitSSHKey(ctx, userID) + m.queryLatencies.WithLabelValues("DeleteGitSSHKey").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteGroupByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + err := m.s.DeleteGroupByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteGroupByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteGroupMemberFromGroup(ctx context.Context, arg database.DeleteGroupMemberFromGroupParams) error { + start := time.Now() + err := m.s.DeleteGroupMemberFromGroup(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteGroupMemberFromGroup").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteLicense(ctx context.Context, id int32) (int32, error) { + start := time.Now() + licenseID, err := m.s.DeleteLicense(ctx, id) + m.queryLatencies.WithLabelValues("DeleteLicense").Observe(time.Since(start).Seconds()) + return licenseID, err +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppCodeByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { + start := time.Now() + r0 := 
m.s.DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppCodesByAppAndUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppSecretByID(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { + start := time.Now() + r0 := m.s.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOAuth2ProviderAppTokensByAppAndUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldNotificationMessages(ctx context.Context) error { + start := time.Now() + r0 := m.s.DeleteOldNotificationMessages(ctx) + m.queryLatencies.WithLabelValues("DeleteOldNotificationMessages").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldProvisionerDaemons(ctx context.Context) error { + start := time.Now() + r0 := m.s.DeleteOldProvisionerDaemons(ctx) + m.queryLatencies.WithLabelValues("DeleteOldProvisionerDaemons").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldWorkspaceAgentLogs(ctx context.Context, arg time.Time) error { + start := time.Now() + r0 := m.s.DeleteOldWorkspaceAgentLogs(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) error { + start := time.Now() + err := m.s.DeleteOldWorkspaceAgentStats(ctx) + m.queryLatencies.WithLabelValues("DeleteOldWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOrganization(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteOrganizationMember(ctx context.Context, arg database.DeleteOrganizationMemberParams) error { + start := time.Now() + r0 := m.s.DeleteOrganizationMember(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteOrganizationMember").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteProvisionerKey(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteProvisionerKey(ctx, id) + m.queryLatencies.WithLabelValues("DeleteProvisionerKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { + start := time.Now() + err := m.s.DeleteReplicasUpdatedBefore(ctx, updatedAt) + m.queryLatencies.WithLabelValues("DeleteReplicasUpdatedBefore").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) DeleteRuntimeConfig(ctx context.Context, key string) error { + start := time.Now() + r0 := m.s.DeleteRuntimeConfig(ctx, key) + m.queryLatencies.WithLabelValues("DeleteRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteTailnetAgent(ctx context.Context, arg database.DeleteTailnetAgentParams) 
(database.DeleteTailnetAgentRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetAgent(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetClient(ctx context.Context, arg database.DeleteTailnetClientParams) (database.DeleteTailnetClientRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetClient(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetClient").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetClientSubscription(ctx context.Context, arg database.DeleteTailnetClientSubscriptionParams) error { + start := time.Now() + r0 := m.s.DeleteTailnetClientSubscription(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetClientSubscription").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteTailnetPeer(ctx context.Context, arg database.DeleteTailnetPeerParams) (database.DeleteTailnetPeerRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetPeer(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetPeer").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTailnetTunnelParams) (database.DeleteTailnetTunnelRow, error) { + start := time.Now() + r0, r1 := m.s.DeleteTailnetTunnel(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteTailnetTunnel").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) DeleteWorkspaceAgentPortShare(ctx context.Context, arg database.DeleteWorkspaceAgentPortShareParams) error { + start := time.Now() + r0 := m.s.DeleteWorkspaceAgentPortShare(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, templateID uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteWorkspaceAgentPortSharesByTemplate(ctx, templateID) + m.queryLatencies.WithLabelValues("DeleteWorkspaceAgentPortSharesByTemplate").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) EnqueueNotificationMessage(ctx context.Context, arg database.EnqueueNotificationMessageParams) error { + start := time.Now() + r0 := m.s.EnqueueNotificationMessage(ctx, arg) + m.queryLatencies.WithLabelValues("EnqueueNotificationMessage").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) FavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { + start := time.Now() + r0 := m.s.FavoriteWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("FavoriteWorkspace").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) FetchNewMessageMetadata(ctx context.Context, arg database.FetchNewMessageMetadataParams) (database.FetchNewMessageMetadataRow, error) { + start := time.Now() + r0, r1 := m.s.FetchNewMessageMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("FetchNewMessageMetadata").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { + start := time.Now() + apiKey, err := m.s.GetAPIKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("GetAPIKeyByID").Observe(time.Since(start).Seconds()) + return apiKey, err +} + +func (m queryMetricsStore) GetAPIKeyByName(ctx context.Context, arg 
database.GetAPIKeyByNameParams) (database.APIKey, error) { + start := time.Now() + apiKey, err := m.s.GetAPIKeyByName(ctx, arg) + m.queryLatencies.WithLabelValues("GetAPIKeyByName").Observe(time.Since(start).Seconds()) + return apiKey, err +} + +func (m queryMetricsStore) GetAPIKeysByLoginType(ctx context.Context, loginType database.LoginType) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysByLoginType(ctx, loginType) + m.queryLatencies.WithLabelValues("GetAPIKeysByLoginType").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + +func (m queryMetricsStore) GetAPIKeysByUserID(ctx context.Context, arg database.GetAPIKeysByUserIDParams) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysByUserID(ctx, arg) + m.queryLatencies.WithLabelValues("GetAPIKeysByUserID").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + +func (m queryMetricsStore) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]database.APIKey, error) { + start := time.Now() + apiKeys, err := m.s.GetAPIKeysLastUsedAfter(ctx, lastUsed) + m.queryLatencies.WithLabelValues("GetAPIKeysLastUsedAfter").Observe(time.Since(start).Seconds()) + return apiKeys, err +} + +func (m queryMetricsStore) GetActiveUserCount(ctx context.Context) (int64, error) { + start := time.Now() + count, err := m.s.GetActiveUserCount(ctx) + m.queryLatencies.WithLabelValues("GetActiveUserCount").Observe(time.Since(start).Seconds()) + return count, err +} + +func (m queryMetricsStore) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, templateID uuid.UUID) ([]database.WorkspaceBuild, error) { + start := time.Now() + r0, r1 := m.s.GetActiveWorkspaceBuildsByTemplateID(ctx, templateID) + m.queryLatencies.WithLabelValues("GetActiveWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetAgents(ctx context.Context) ([]database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetAgents(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetAgents").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetCoordinators(ctx context.Context) ([]database.TailnetCoordinator, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetCoordinators(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetCoordinators").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetPeers(ctx context.Context) ([]database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetPeers(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetPeers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetTunnel, error) { + start := time.Now() + r0, r1 := m.s.GetAllTailnetTunnels(ctx) + m.queryLatencies.WithLabelValues("GetAllTailnetTunnels").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAnnouncementBanners(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetAnnouncementBanners(ctx) + m.queryLatencies.WithLabelValues("GetAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAppSecurityKey(ctx context.Context) (string, error) { + start := time.Now() + key, err := m.s.GetAppSecurityKey(ctx) + m.queryLatencies.WithLabelValues("GetAppSecurityKey").Observe(time.Since(start).Seconds()) 
+ return key, err +} + +func (m queryMetricsStore) GetApplicationName(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetApplicationName(ctx) + m.queryLatencies.WithLabelValues("GetApplicationName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams) ([]database.GetAuditLogsOffsetRow, error) { + start := time.Now() + rows, err := m.s.GetAuditLogsOffset(ctx, arg) + m.queryLatencies.WithLabelValues("GetAuditLogsOffset").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m queryMetricsStore) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (database.GetAuthorizationUserRolesRow, error) { + start := time.Now() + row, err := m.s.GetAuthorizationUserRoles(ctx, userID) + m.queryLatencies.WithLabelValues("GetAuthorizationUserRoles").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetCoordinatorResumeTokenSigningKey(ctx) + m.queryLatencies.WithLabelValues("GetCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetCryptoKeyByFeatureAndSequence(ctx context.Context, arg database.GetCryptoKeyByFeatureAndSequenceParams) (database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeyByFeatureAndSequence(ctx, arg) + m.queryLatencies.WithLabelValues("GetCryptoKeyByFeatureAndSequence").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetCryptoKeys(ctx context.Context) ([]database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeys(ctx) + m.queryLatencies.WithLabelValues("GetCryptoKeys").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetCryptoKeysByFeature(ctx context.Context, feature database.CryptoKeyFeature) ([]database.CryptoKey, error) { + start := time.Now() + r0, r1 := m.s.GetCryptoKeysByFeature(ctx, feature) + m.queryLatencies.WithLabelValues("GetCryptoKeysByFeature").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDBCryptKeys(ctx context.Context) ([]database.DBCryptKey, error) { + start := time.Now() + r0, r1 := m.s.GetDBCryptKeys(ctx) + m.queryLatencies.WithLabelValues("GetDBCryptKeys").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDERPMeshKey(ctx context.Context) (string, error) { + start := time.Now() + key, err := m.s.GetDERPMeshKey(ctx) + m.queryLatencies.WithLabelValues("GetDERPMeshKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) GetDefaultOrganization(ctx context.Context) (database.Organization, error) { + start := time.Now() + r0, r1 := m.s.GetDefaultOrganization(ctx) + m.queryLatencies.WithLabelValues("GetDefaultOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDefaultProxyConfig(ctx context.Context) (database.GetDefaultProxyConfigRow, error) { + start := time.Now() + resp, err := m.s.GetDefaultProxyConfig(ctx) + m.queryLatencies.WithLabelValues("GetDefaultProxyConfig").Observe(time.Since(start).Seconds()) + return resp, err +} + +func (m queryMetricsStore) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]database.GetDeploymentDAUsRow, error) { + start := time.Now() + rows, err := 
m.s.GetDeploymentDAUs(ctx, tzOffset) + m.queryLatencies.WithLabelValues("GetDeploymentDAUs").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m queryMetricsStore) GetDeploymentID(ctx context.Context) (string, error) { + start := time.Now() + id, err := m.s.GetDeploymentID(ctx) + m.queryLatencies.WithLabelValues("GetDeploymentID").Observe(time.Since(start).Seconds()) + return id, err +} + +func (m queryMetricsStore) GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentStatsRow, error) { + start := time.Now() + row, err := m.s.GetDeploymentWorkspaceAgentStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetDeploymentWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) (database.GetDeploymentWorkspaceAgentUsageStatsRow, error) { + start := time.Now() + r0, r1 := m.s.GetDeploymentWorkspaceAgentUsageStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetDeploymentWorkspaceStats(ctx context.Context) (database.GetDeploymentWorkspaceStatsRow, error) { + start := time.Now() + row, err := m.s.GetDeploymentWorkspaceStats(ctx) + m.queryLatencies.WithLabelValues("GetDeploymentWorkspaceStats").Observe(time.Since(start).Seconds()) + return row, err +} + +func (m queryMetricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) { + start := time.Now() + link, err := m.s.GetExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("GetExternalAuthLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) { + start := time.Now() + r0, r1 := m.s.GetExternalAuthLinksByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetExternalAuthLinksByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg database.GetFailedWorkspaceBuildsByTemplateIDParams) ([]database.GetFailedWorkspaceBuildsByTemplateIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetFailedWorkspaceBuildsByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("GetFailedWorkspaceBuildsByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetFileByHashAndCreator(ctx context.Context, arg database.GetFileByHashAndCreatorParams) (database.File, error) { + start := time.Now() + file, err := m.s.GetFileByHashAndCreator(ctx, arg) + m.queryLatencies.WithLabelValues("GetFileByHashAndCreator").Observe(time.Since(start).Seconds()) + return file, err +} + +func (m queryMetricsStore) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, error) { + start := time.Now() + file, err := m.s.GetFileByID(ctx, id) + m.queryLatencies.WithLabelValues("GetFileByID").Observe(time.Since(start).Seconds()) + return file, err +} + +func (m queryMetricsStore) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]database.GetFileTemplatesRow, error) { + start := time.Now() + rows, err := m.s.GetFileTemplates(ctx, fileID) + m.queryLatencies.WithLabelValues("GetFileTemplates").Observe(time.Since(start).Seconds()) + return rows, err +} + +func (m 
queryMetricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) { + start := time.Now() + key, err := m.s.GetGitSSHKey(ctx, userID) + m.queryLatencies.WithLabelValues("GetGitSSHKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) GetGroupByID(ctx context.Context, id uuid.UUID) (database.Group, error) { + start := time.Now() + group, err := m.s.GetGroupByID(ctx, id) + m.queryLatencies.WithLabelValues("GetGroupByID").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) GetGroupByOrgAndName(ctx context.Context, arg database.GetGroupByOrgAndNameParams) (database.Group, error) { + start := time.Now() + group, err := m.s.GetGroupByOrgAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetGroupByOrgAndName").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) GetGroupMembers(ctx context.Context) ([]database.GroupMember, error) { + start := time.Now() + r0, r1 := m.s.GetGroupMembers(ctx) + m.queryLatencies.WithLabelValues("GetGroupMembers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetGroupMembersByGroupID(ctx context.Context, groupID uuid.UUID) ([]database.GroupMember, error) { + start := time.Now() + users, err := m.s.GetGroupMembersByGroupID(ctx, groupID) + m.queryLatencies.WithLabelValues("GetGroupMembersByGroupID").Observe(time.Since(start).Seconds()) + return users, err +} + +func (m queryMetricsStore) GetGroupMembersCountByGroupID(ctx context.Context, groupID uuid.UUID) (int64, error) { + start := time.Now() + r0, r1 := m.s.GetGroupMembersCountByGroupID(ctx, groupID) + m.queryLatencies.WithLabelValues("GetGroupMembersCountByGroupID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetGroups(ctx context.Context, arg database.GetGroupsParams) ([]database.GetGroupsRow, error) { + start := time.Now() + r0, r1 := m.s.GetGroups(ctx, arg) + m.queryLatencies.WithLabelValues("GetGroups").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetHealthSettings(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetHealthSettings(ctx) + m.queryLatencies.WithLabelValues("GetHealthSettings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { + start := time.Now() + jobs, err := m.s.GetHungProvisionerJobs(ctx, hungSince) + m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(start).Seconds()) + return jobs, err +} + +func (m queryMetricsStore) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.GetJFrogXrayScanByWorkspaceAndAgentIDParams) (database.JfrogXrayScan, error) { + start := time.Now() + r0, r1 := m.s.GetJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) + m.queryLatencies.WithLabelValues("GetJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetLastUpdateCheck(ctx context.Context) (string, error) { + start := time.Now() + version, err := m.s.GetLastUpdateCheck(ctx) + m.queryLatencies.WithLabelValues("GetLastUpdateCheck").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetLatestCryptoKeyByFeature(ctx context.Context, feature database.CryptoKeyFeature) (database.CryptoKey, error) { + start := time.Now() + r0, r1 := 
m.s.GetLatestCryptoKeyByFeature(ctx, feature) + m.queryLatencies.WithLabelValues("GetLatestCryptoKeyByFeature").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.WorkspaceBuild, error) { + start := time.Now() + build, err := m.s.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID) + m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildByWorkspaceID").Observe(time.Since(start).Seconds()) + return build, err +} + +func (m queryMetricsStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) { + start := time.Now() + builds, err := m.s.GetLatestWorkspaceBuilds(ctx) + m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuilds").Observe(time.Since(start).Seconds()) + return builds, err +} + +func (m queryMetricsStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { + start := time.Now() + builds, err := m.s.GetLatestWorkspaceBuildsByWorkspaceIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuildsByWorkspaceIDs").Observe(time.Since(start).Seconds()) + return builds, err +} + +func (m queryMetricsStore) GetLicenseByID(ctx context.Context, id int32) (database.License, error) { + start := time.Now() + license, err := m.s.GetLicenseByID(ctx, id) + m.queryLatencies.WithLabelValues("GetLicenseByID").Observe(time.Since(start).Seconds()) + return license, err +} + +func (m queryMetricsStore) GetLicenses(ctx context.Context) ([]database.License, error) { + start := time.Now() + licenses, err := m.s.GetLicenses(ctx) + m.queryLatencies.WithLabelValues("GetLicenses").Observe(time.Since(start).Seconds()) + return licenses, err +} + +func (m queryMetricsStore) GetLogoURL(ctx context.Context) (string, error) { + start := time.Now() + url, err := m.s.GetLogoURL(ctx) + m.queryLatencies.WithLabelValues("GetLogoURL").Observe(time.Since(start).Seconds()) + return url, err +} + +func (m queryMetricsStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationMessagesByStatus(ctx, arg) + m.queryLatencies.WithLabelValues("GetNotificationMessagesByStatus").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetNotificationReportGeneratorLogByTemplate(ctx context.Context, arg uuid.UUID) (database.NotificationReportGeneratorLog, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationReportGeneratorLogByTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("GetNotificationReportGeneratorLogByTemplate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetNotificationTemplateByID(ctx context.Context, id uuid.UUID) (database.NotificationTemplate, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationTemplateByID(ctx, id) + m.queryLatencies.WithLabelValues("GetNotificationTemplateByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetNotificationTemplatesByKind(ctx context.Context, kind database.NotificationTemplateKind) ([]database.NotificationTemplate, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationTemplatesByKind(ctx, kind) + m.queryLatencies.WithLabelValues("GetNotificationTemplatesByKind").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) 
GetNotificationsSettings(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationsSettings(ctx) + m.queryLatencies.WithLabelValues("GetNotificationsSettings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppByID(ctx, id) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppCode, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppCodeByID(ctx, id) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppCode, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppCodeByPrefix(ctx, secretPrefix) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppCodeByPrefix").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppSecretByID(ctx, id) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secretPrefix []byte) (database.OAuth2ProviderAppSecret, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppSecretByPrefix(ctx, secretPrefix) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretByPrefix").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppSecretsByAppID(ctx, appID) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppSecretsByAppID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPrefix []byte) (database.OAuth2ProviderAppToken, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppTokenByPrefix(ctx, hashPrefix) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppTokenByPrefix").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderApps(ctx) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderApps").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetOAuth2ProviderAppsByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetOAuth2ProviderAppsByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOAuthSigningKey(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetOAuthSigningKey(ctx) + 
m.queryLatencies.WithLabelValues("GetOAuthSigningKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetOrganizationByID(ctx context.Context, id uuid.UUID) (database.Organization, error) { + start := time.Now() + organization, err := m.s.GetOrganizationByID(ctx, id) + m.queryLatencies.WithLabelValues("GetOrganizationByID").Observe(time.Since(start).Seconds()) + return organization, err +} + +func (m queryMetricsStore) GetOrganizationByName(ctx context.Context, name string) (database.Organization, error) { + start := time.Now() + organization, err := m.s.GetOrganizationByName(ctx, name) + m.queryLatencies.WithLabelValues("GetOrganizationByName").Observe(time.Since(start).Seconds()) + return organization, err +} + +func (m queryMetricsStore) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]database.GetOrganizationIDsByMemberIDsRow, error) { + start := time.Now() + organizations, err := m.s.GetOrganizationIDsByMemberIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetOrganizationIDsByMemberIDs").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func (m queryMetricsStore) GetOrganizations(ctx context.Context, args database.GetOrganizationsParams) ([]database.Organization, error) { + start := time.Now() + organizations, err := m.s.GetOrganizations(ctx, args) + m.queryLatencies.WithLabelValues("GetOrganizations").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func (m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID uuid.UUID) ([]database.Organization, error) { + start := time.Now() + organizations, err := m.s.GetOrganizationsByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetOrganizationsByUserID").Observe(time.Since(start).Seconds()) + return organizations, err +} + +func (m queryMetricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { + start := time.Now() + schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetParameterSchemasByJobID").Observe(time.Since(start).Seconds()) + return schemas, err +} + +func (m queryMetricsStore) GetPreviousTemplateVersion(ctx context.Context, arg database.GetPreviousTemplateVersionParams) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetPreviousTemplateVersion(ctx, arg) + m.queryLatencies.WithLabelValues("GetPreviousTemplateVersion").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetProvisionerDaemons(ctx context.Context) ([]database.ProvisionerDaemon, error) { + start := time.Now() + daemons, err := m.s.GetProvisionerDaemons(ctx) + m.queryLatencies.WithLabelValues("GetProvisionerDaemons").Observe(time.Since(start).Seconds()) + return daemons, err +} + +func (m queryMetricsStore) GetProvisionerDaemonsByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerDaemon, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerDaemonsByOrganization(ctx, organizationID) + m.queryLatencies.WithLabelValues("GetProvisionerDaemonsByOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + start := time.Now() + job, err := m.s.GetProvisionerJobByID(ctx, id) + m.queryLatencies.WithLabelValues("GetProvisionerJobByID").Observe(time.Since(start).Seconds()) + return job, err +} + +func (m 
queryMetricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetProvisionerJobTimingsByJobID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { + start := time.Now() + jobs, err := m.s.GetProvisionerJobsByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDs").Observe(time.Since(start).Seconds()) + return jobs, err +} + +func (m queryMetricsStore) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids) + m.queryLatencies.WithLabelValues("GetProvisionerJobsByIDsWithQueuePosition").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { + start := time.Now() + jobs, err := m.s.GetProvisionerJobsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetProvisionerJobsCreatedAfter").Observe(time.Since(start).Seconds()) + return jobs, err +} + +func (m queryMetricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByHashedSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) (database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByID(ctx, id) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerKeyByName(ctx context.Context, name database.GetProvisionerKeyByNameParams) (database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerKeyByName(ctx, name) + m.queryLatencies.WithLabelValues("GetProvisionerKeyByName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetProvisionerLogsAfterID(ctx context.Context, arg database.GetProvisionerLogsAfterIDParams) ([]database.ProvisionerJobLog, error) { + start := time.Now() + logs, err := m.s.GetProvisionerLogsAfterID(ctx, arg) + m.queryLatencies.WithLabelValues("GetProvisionerLogsAfterID").Observe(time.Since(start).Seconds()) + return logs, err +} + +func (m queryMetricsStore) GetQuotaAllowanceForUser(ctx context.Context, userID database.GetQuotaAllowanceForUserParams) (int64, error) { + start := time.Now() + allowance, err := m.s.GetQuotaAllowanceForUser(ctx, userID) + m.queryLatencies.WithLabelValues("GetQuotaAllowanceForUser").Observe(time.Since(start).Seconds()) + return allowance, err +} + +func (m queryMetricsStore) GetQuotaConsumedForUser(ctx context.Context, ownerID database.GetQuotaConsumedForUserParams) (int64, error) { + start := time.Now() + consumed, err := m.s.GetQuotaConsumedForUser(ctx, ownerID) + m.queryLatencies.WithLabelValues("GetQuotaConsumedForUser").Observe(time.Since(start).Seconds()) + return consumed, err +} + +func (m queryMetricsStore) GetReplicaByID(ctx 
context.Context, id uuid.UUID) (database.Replica, error) { + start := time.Now() + replica, err := m.s.GetReplicaByID(ctx, id) + m.queryLatencies.WithLabelValues("GetReplicaByID").Observe(time.Since(start).Seconds()) + return replica, err +} + +func (m queryMetricsStore) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.Replica, error) { + start := time.Now() + replicas, err := m.s.GetReplicasUpdatedAfter(ctx, updatedAt) + m.queryLatencies.WithLabelValues("GetReplicasUpdatedAfter").Observe(time.Since(start).Seconds()) + return replicas, err +} + +func (m queryMetricsStore) GetRuntimeConfig(ctx context.Context, key string) (string, error) { + start := time.Now() + r0, r1 := m.s.GetRuntimeConfig(ctx, key) + m.queryLatencies.WithLabelValues("GetRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetAgents(ctx, id) + m.queryLatencies.WithLabelValues("GetTailnetAgents").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]database.TailnetClient, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetClientsForAgent(ctx, agentID) + m.queryLatencies.WithLabelValues("GetTailnetClientsForAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetPeers(ctx, id) + m.queryLatencies.WithLabelValues("GetTailnetPeers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerBindingsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetTunnelPeerBindings(ctx, srcID) + m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerBindings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerIDsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTailnetTunnelPeerIDs(ctx, srcID) + m.queryLatencies.WithLabelValues("GetTailnetTunnelPeerIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateAppInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateAppInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) ([]database.GetTemplateAppInsightsByTemplateRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateAppInsightsByTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateAppInsightsByTemplate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateAverageBuildTime(ctx context.Context, arg database.GetTemplateAverageBuildTimeParams) (database.GetTemplateAverageBuildTimeRow, error) { + start := time.Now() + buildTime, err := m.s.GetTemplateAverageBuildTime(ctx, arg) + 
m.queryLatencies.WithLabelValues("GetTemplateAverageBuildTime").Observe(time.Since(start).Seconds()) + return buildTime, err +} + +func (m queryMetricsStore) GetTemplateByID(ctx context.Context, id uuid.UUID) (database.Template, error) { + start := time.Now() + template, err := m.s.GetTemplateByID(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateByID").Observe(time.Since(start).Seconds()) + return template, err +} + +func (m queryMetricsStore) GetTemplateByOrganizationAndName(ctx context.Context, arg database.GetTemplateByOrganizationAndNameParams) (database.Template, error) { + start := time.Now() + template, err := m.s.GetTemplateByOrganizationAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateByOrganizationAndName").Observe(time.Since(start).Seconds()) + return template, err +} + +func (m queryMetricsStore) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateDAUsParams) ([]database.GetTemplateDAUsRow, error) { + start := time.Now() + daus, err := m.s.GetTemplateDAUs(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateDAUs").Observe(time.Since(start).Seconds()) + return daus, err +} + +func (m queryMetricsStore) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsightsByInterval(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsightsByInterval").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateInsightsByTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateInsightsByTemplate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateParameterInsights(ctx context.Context, arg database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateParameterInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateParameterInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateUsageStats(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateUsageStats").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByID(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateVersionByID").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByJobID(ctx, jobID) + 
m.queryLatencies.WithLabelValues("GetTemplateVersionByJobID").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg database.GetTemplateVersionByTemplateIDAndNameParams) (database.TemplateVersion, error) { + start := time.Now() + version, err := m.s.GetTemplateVersionByTemplateIDAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateVersionByTemplateIDAndName").Observe(time.Since(start).Seconds()) + return version, err +} + +func (m queryMetricsStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { + start := time.Now() + parameters, err := m.s.GetTemplateVersionParameters(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionParameters").Observe(time.Since(start).Seconds()) + return parameters, err +} + +func (m queryMetricsStore) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionVariable, error) { + start := time.Now() + variables, err := m.s.GetTemplateVersionVariables(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionVariables").Observe(time.Since(start).Seconds()) + return variables, err +} + +func (m queryMetricsStore) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionWorkspaceTags").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetTemplateVersionsByIDs").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplateVersionsByTemplateID(ctx context.Context, arg database.GetTemplateVersionsByTemplateIDParams) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplateVersionsByTemplateID").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.TemplateVersion, error) { + start := time.Now() + versions, err := m.s.GetTemplateVersionsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetTemplateVersionsCreatedAfter").Observe(time.Since(start).Seconds()) + return versions, err +} + +func (m queryMetricsStore) GetTemplates(ctx context.Context) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetTemplates(ctx) + m.queryLatencies.WithLabelValues("GetTemplates").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetTemplatesWithFilter(ctx context.Context, arg database.GetTemplatesWithFilterParams) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetTemplatesWithFilter(ctx, arg) + m.queryLatencies.WithLabelValues("GetTemplatesWithFilter").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { + start := time.Now() + licenses, err 
:= m.s.GetUnexpiredLicenses(ctx) + m.queryLatencies.WithLabelValues("GetUnexpiredLicenses").Observe(time.Since(start).Seconds()) + return licenses, err +} + +func (m queryMetricsStore) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserActivityInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserActivityInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserByEmailOrUsername(ctx context.Context, arg database.GetUserByEmailOrUsernameParams) (database.User, error) { + start := time.Now() + user, err := m.s.GetUserByEmailOrUsername(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserByEmailOrUsername").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) GetUserByID(ctx context.Context, id uuid.UUID) (database.User, error) { + start := time.Now() + user, err := m.s.GetUserByID(ctx, id) + m.queryLatencies.WithLabelValues("GetUserByID").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) GetUserCount(ctx context.Context) (int64, error) { + start := time.Now() + count, err := m.s.GetUserCount(ctx) + m.queryLatencies.WithLabelValues("GetUserCount").Observe(time.Since(start).Seconds()) + return count, err +} + +func (m queryMetricsStore) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserLatencyInsights(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserLatencyInsights").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (database.UserLink, error) { + start := time.Now() + link, err := m.s.GetUserLinkByLinkedID(ctx, linkedID) + m.queryLatencies.WithLabelValues("GetUserLinkByLinkedID").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetUserLinkByUserIDLoginType(ctx context.Context, arg database.GetUserLinkByUserIDLoginTypeParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.GetUserLinkByUserIDLoginType(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserLinkByUserIDLoginType").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.UserLink, error) { + start := time.Now() + r0, r1 := m.s.GetUserLinksByUserID(ctx, userID) + m.queryLatencies.WithLabelValues("GetUserLinksByUserID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]database.NotificationPreference, error) { + start := time.Now() + r0, r1 := m.s.GetUserNotificationPreferences(ctx, userID) + m.queryLatencies.WithLabelValues("GetUserNotificationPreferences").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserWorkspaceBuildParameters(ctx context.Context, ownerID database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserWorkspaceBuildParameters(ctx, ownerID) + m.queryLatencies.WithLabelValues("GetUserWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUsers(ctx context.Context, arg 
database.GetUsersParams) ([]database.GetUsersRow, error) { + start := time.Now() + users, err := m.s.GetUsers(ctx, arg) + m.queryLatencies.WithLabelValues("GetUsers").Observe(time.Since(start).Seconds()) + return users, err +} + +func (m queryMetricsStore) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]database.User, error) { + start := time.Now() + users, err := m.s.GetUsersByIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetUsersByIDs").Observe(time.Since(start).Seconds()) + return users, err +} + +func (m queryMetricsStore) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, authToken) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentAndLatestBuildByAuthToken").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (database.WorkspaceAgent, error) { + start := time.Now() + agent, err := m.s.GetWorkspaceAgentByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentByID").Observe(time.Since(start).Seconds()) + return agent, err +} + +func (m queryMetricsStore) GetWorkspaceAgentByInstanceID(ctx context.Context, authInstanceID string) (database.WorkspaceAgent, error) { + start := time.Now() + agent, err := m.s.GetWorkspaceAgentByInstanceID(ctx, authInstanceID) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentByInstanceID").Observe(time.Since(start).Seconds()) + return agent, err +} + +func (m queryMetricsStore) GetWorkspaceAgentLifecycleStateByID(ctx context.Context, id uuid.UUID) (database.GetWorkspaceAgentLifecycleStateByIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentLifecycleStateByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentLogSourcesByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentLogSource, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentLogSourcesByAgentIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogSourcesByAgentIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentLogsAfter(ctx context.Context, arg database.GetWorkspaceAgentLogsAfterParams) ([]database.WorkspaceAgentLog, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentLogsAfter(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentLogsAfter").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentMetadata(ctx context.Context, workspaceAgentID database.GetWorkspaceAgentMetadataParams) ([]database.WorkspaceAgentMetadatum, error) { + start := time.Now() + metadata, err := m.s.GetWorkspaceAgentMetadata(ctx, workspaceAgentID) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) + return metadata, err +} + +func (m queryMetricsStore) GetWorkspaceAgentPortShare(ctx context.Context, arg database.GetWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentPortShare(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentScriptTimingsByBuildID(ctx 
context.Context, id uuid.UUID) ([]database.GetWorkspaceAgentScriptTimingsByBuildIDRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentScriptTimingsByBuildID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptTimingsByBuildID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentScriptsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentScript, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentScriptsByAgentIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentScriptsByAgentIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsRow, error) { + start := time.Now() + stats, err := m.s.GetWorkspaceAgentStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return stats, err +} + +func (m queryMetricsStore) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentStatsAndLabelsRow, error) { + start := time.Now() + stats, err := m.s.GetWorkspaceAgentStatsAndLabels(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentStatsAndLabels").Observe(time.Since(start).Seconds()) + return stats, err +} + +func (m queryMetricsStore) GetWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentUsageStats(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStats").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentUsageStatsAndLabels(ctx context.Context, createdAt time.Time) ([]database.GetWorkspaceAgentUsageStatsAndLabelsRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceAgentUsageStatsAndLabels(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentUsageStatsAndLabels").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgent, error) { + start := time.Now() + agents, err := m.s.GetWorkspaceAgentsByResourceIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentsByResourceIDs").Observe(time.Since(start).Seconds()) + return agents, err +} + +func (m queryMetricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) { + start := time.Now() + agents, err := m.s.GetWorkspaceAgentsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentsCreatedAfter").Observe(time.Since(start).Seconds()) + return agents, err +} + +func (m queryMetricsStore) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgent, error) { + start := time.Now() + agents, err := m.s.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspaceID) + m.queryLatencies.WithLabelValues("GetWorkspaceAgentsInLatestBuildByWorkspaceID").Observe(time.Since(start).Seconds()) + return agents, err +} + +func (m queryMetricsStore) GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg database.GetWorkspaceAppByAgentIDAndSlugParams) (database.WorkspaceApp, error) { + start := time.Now() + app, err := m.s.GetWorkspaceAppByAgentIDAndSlug(ctx, arg) + 
m.queryLatencies.WithLabelValues("GetWorkspaceAppByAgentIDAndSlug").Observe(time.Since(start).Seconds()) + return app, err +} + +func (m queryMetricsStore) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UUID) ([]database.WorkspaceApp, error) { + start := time.Now() + apps, err := m.s.GetWorkspaceAppsByAgentID(ctx, agentID) + m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentID").Observe(time.Since(start).Seconds()) + return apps, err +} + +func (m queryMetricsStore) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceApp, error) { + start := time.Now() + apps, err := m.s.GetWorkspaceAppsByAgentIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetWorkspaceAppsByAgentIDs").Observe(time.Since(start).Seconds()) + return apps, err +} + +func (m queryMetricsStore) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceApp, error) { + start := time.Now() + apps, err := m.s.GetWorkspaceAppsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceAppsCreatedAfter").Observe(time.Since(start).Seconds()) + return apps, err +} + +func (m queryMetricsStore) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (database.WorkspaceBuild, error) { + start := time.Now() + build, err := m.s.GetWorkspaceBuildByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildByID").Observe(time.Since(start).Seconds()) + return build, err +} + +func (m queryMetricsStore) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UUID) (database.WorkspaceBuild, error) { + start := time.Now() + build, err := m.s.GetWorkspaceBuildByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildByJobID").Observe(time.Since(start).Seconds()) + return build, err +} + +func (m queryMetricsStore) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Context, arg database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams) (database.WorkspaceBuild, error) { + start := time.Now() + build, err := m.s.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildByWorkspaceIDAndBuildNumber").Observe(time.Since(start).Seconds()) + return build, err +} + +func (m queryMetricsStore) GetWorkspaceBuildParameters(ctx context.Context, workspaceBuildID uuid.UUID) ([]database.WorkspaceBuildParameter, error) { + start := time.Now() + params, err := m.s.GetWorkspaceBuildParameters(ctx, workspaceBuildID) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) + return params, err +} + +func (m queryMetricsStore) GetWorkspaceBuildStatsByTemplates(ctx context.Context, since time.Time) ([]database.GetWorkspaceBuildStatsByTemplatesRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceBuildStatsByTemplates(ctx, since) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildStatsByTemplates").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg database.GetWorkspaceBuildsByWorkspaceIDParams) ([]database.WorkspaceBuild, error) { + start := time.Now() + builds, err := m.s.GetWorkspaceBuildsByWorkspaceID(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildsByWorkspaceID").Observe(time.Since(start).Seconds()) + return builds, err +} + +func (m queryMetricsStore) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceBuild, error) { + start := time.Now() + builds, err := 
m.s.GetWorkspaceBuildsCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceBuildsCreatedAfter").Observe(time.Since(start).Seconds()) + return builds, err +} + +func (m queryMetricsStore) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (database.Workspace, error) { + start := time.Now() + workspace, err := m.s.GetWorkspaceByAgentID(ctx, agentID) + m.queryLatencies.WithLabelValues("GetWorkspaceByAgentID").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (database.Workspace, error) { + start := time.Now() + workspace, err := m.s.GetWorkspaceByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceByID").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database.GetWorkspaceByOwnerIDAndNameParams) (database.Workspace, error) { + start := time.Now() + workspace, err := m.s.GetWorkspaceByOwnerIDAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceByOwnerIDAndName").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { + start := time.Now() + workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID) + m.queryLatencies.WithLabelValues("GetWorkspaceByWorkspaceAppID").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) GetWorkspaceProxies(ctx context.Context) ([]database.WorkspaceProxy, error) { + start := time.Now() + proxies, err := m.s.GetWorkspaceProxies(ctx) + m.queryLatencies.WithLabelValues("GetWorkspaceProxies").Observe(time.Since(start).Seconds()) + return proxies, err +} + +func (m queryMetricsStore) GetWorkspaceProxyByHostname(ctx context.Context, arg database.GetWorkspaceProxyByHostnameParams) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.GetWorkspaceProxyByHostname(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaceProxyByHostname").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) GetWorkspaceProxyByID(ctx context.Context, id uuid.UUID) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.GetWorkspaceProxyByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceProxyByID").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) GetWorkspaceProxyByName(ctx context.Context, name string) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.GetWorkspaceProxyByName(ctx, name) + m.queryLatencies.WithLabelValues("GetWorkspaceProxyByName").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (database.WorkspaceResource, error) { + start := time.Now() + resource, err := m.s.GetWorkspaceResourceByID(ctx, id) + m.queryLatencies.WithLabelValues("GetWorkspaceResourceByID").Observe(time.Since(start).Seconds()) + return resource, err +} + +func (m queryMetricsStore) GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResourceMetadatum, error) { + start := time.Now() + metadata, err := m.s.GetWorkspaceResourceMetadataByResourceIDs(ctx, ids) + 
m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataByResourceIDs").Observe(time.Since(start).Seconds()) + return metadata, err +} + +func (m queryMetricsStore) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResourceMetadatum, error) { + start := time.Now() + metadata, err := m.s.GetWorkspaceResourceMetadataCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceResourceMetadataCreatedAfter").Observe(time.Since(start).Seconds()) + return metadata, err +} + +func (m queryMetricsStore) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.UUID) ([]database.WorkspaceResource, error) { + start := time.Now() + resources, err := m.s.GetWorkspaceResourcesByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobID").Observe(time.Since(start).Seconds()) + return resources, err +} + +func (m queryMetricsStore) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResource, error) { + start := time.Now() + resources, err := m.s.GetWorkspaceResourcesByJobIDs(ctx, ids) + m.queryLatencies.WithLabelValues("GetWorkspaceResourcesByJobIDs").Observe(time.Since(start).Seconds()) + return resources, err +} + +func (m queryMetricsStore) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResource, error) { + start := time.Now() + resources, err := m.s.GetWorkspaceResourcesCreatedAfter(ctx, createdAt) + m.queryLatencies.WithLabelValues("GetWorkspaceResourcesCreatedAfter").Observe(time.Since(start).Seconds()) + return resources, err +} + +func (m queryMetricsStore) GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx context.Context, templateIds []uuid.UUID) ([]database.GetWorkspaceUniqueOwnerCountByTemplateIDsRow, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, templateIds) + m.queryLatencies.WithLabelValues("GetWorkspaceUniqueOwnerCountByTemplateIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetWorkspaces(ctx context.Context, arg database.GetWorkspacesParams) ([]database.GetWorkspacesRow, error) { + start := time.Now() + workspaces, err := m.s.GetWorkspaces(ctx, arg) + m.queryLatencies.WithLabelValues("GetWorkspaces").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]database.WorkspaceTable, error) { + start := time.Now() + workspaces, err := m.s.GetWorkspacesEligibleForTransition(ctx, now) + m.queryLatencies.WithLabelValues("GetWorkspacesEligibleForAutoStartStop").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { + start := time.Now() + key, err := m.s.InsertAPIKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertAPIKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (database.Group, error) { + start := time.Now() + group, err := m.s.InsertAllUsersGroup(ctx, organizationID) + m.queryLatencies.WithLabelValues("InsertAllUsersGroup").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) InsertAuditLog(ctx context.Context, arg database.InsertAuditLogParams) (database.AuditLog, error) { + start := time.Now() + log, err := 
m.s.InsertAuditLog(ctx, arg) + m.queryLatencies.WithLabelValues("InsertAuditLog").Observe(time.Since(start).Seconds()) + return log, err +} + +func (m queryMetricsStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) { + start := time.Now() + key, err := m.s.InsertCryptoKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertCryptoKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) InsertCustomRole(ctx context.Context, arg database.InsertCustomRoleParams) (database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.InsertCustomRole(ctx, arg) + m.queryLatencies.WithLabelValues("InsertCustomRole").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertDBCryptKey(ctx context.Context, arg database.InsertDBCryptKeyParams) error { + start := time.Now() + r0 := m.s.InsertDBCryptKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertDBCryptKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) InsertDERPMeshKey(ctx context.Context, value string) error { + start := time.Now() + err := m.s.InsertDERPMeshKey(ctx, value) + m.queryLatencies.WithLabelValues("InsertDERPMeshKey").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertDeploymentID(ctx context.Context, value string) error { + start := time.Now() + err := m.s.InsertDeploymentID(ctx, value) + m.queryLatencies.WithLabelValues("InsertDeploymentID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertExternalAuthLink(ctx context.Context, arg database.InsertExternalAuthLinkParams) (database.ExternalAuthLink, error) { + start := time.Now() + link, err := m.s.InsertExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("InsertExternalAuthLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) InsertFile(ctx context.Context, arg database.InsertFileParams) (database.File, error) { + start := time.Now() + file, err := m.s.InsertFile(ctx, arg) + m.queryLatencies.WithLabelValues("InsertFile").Observe(time.Since(start).Seconds()) + return file, err +} + +func (m queryMetricsStore) InsertGitSSHKey(ctx context.Context, arg database.InsertGitSSHKeyParams) (database.GitSSHKey, error) { + start := time.Now() + key, err := m.s.InsertGitSSHKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertGitSSHKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) InsertGroup(ctx context.Context, arg database.InsertGroupParams) (database.Group, error) { + start := time.Now() + group, err := m.s.InsertGroup(ctx, arg) + m.queryLatencies.WithLabelValues("InsertGroup").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) InsertGroupMember(ctx context.Context, arg database.InsertGroupMemberParams) error { + start := time.Now() + err := m.s.InsertGroupMember(ctx, arg) + m.queryLatencies.WithLabelValues("InsertGroupMember").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) { + start := time.Now() + license, err := m.s.InsertLicense(ctx, arg) + m.queryLatencies.WithLabelValues("InsertLicense").Observe(time.Since(start).Seconds()) + return license, err +} + +func (m queryMetricsStore) InsertMissingGroups(ctx context.Context, arg database.InsertMissingGroupsParams) ([]database.Group, error) { 
+ start := time.Now() + r0, r1 := m.s.InsertMissingGroups(ctx, arg) + m.queryLatencies.WithLabelValues("InsertMissingGroups").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) { + start := time.Now() + r0, r1 := m.s.InsertOAuth2ProviderApp(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOAuth2ProviderApp").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) { + start := time.Now() + r0, r1 := m.s.InsertOAuth2ProviderAppCode(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppCode").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) { + start := time.Now() + r0, r1 := m.s.InsertOAuth2ProviderAppSecret(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertOAuth2ProviderAppToken(ctx context.Context, arg database.InsertOAuth2ProviderAppTokenParams) (database.OAuth2ProviderAppToken, error) { + start := time.Now() + r0, r1 := m.s.InsertOAuth2ProviderAppToken(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOAuth2ProviderAppToken").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertOrganization(ctx context.Context, arg database.InsertOrganizationParams) (database.Organization, error) { + start := time.Now() + organization, err := m.s.InsertOrganization(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOrganization").Observe(time.Since(start).Seconds()) + return organization, err +} + +func (m queryMetricsStore) InsertOrganizationMember(ctx context.Context, arg database.InsertOrganizationMemberParams) (database.OrganizationMember, error) { + start := time.Now() + member, err := m.s.InsertOrganizationMember(ctx, arg) + m.queryLatencies.WithLabelValues("InsertOrganizationMember").Observe(time.Since(start).Seconds()) + return member, err +} + +func (m queryMetricsStore) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { + start := time.Now() + job, err := m.s.InsertProvisionerJob(ctx, arg) + m.queryLatencies.WithLabelValues("InsertProvisionerJob").Observe(time.Since(start).Seconds()) + return job, err +} + +func (m queryMetricsStore) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) { + start := time.Now() + logs, err := m.s.InsertProvisionerJobLogs(ctx, arg) + m.queryLatencies.WithLabelValues("InsertProvisionerJobLogs").Observe(time.Since(start).Seconds()) + return logs, err +} + +func (m queryMetricsStore) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { + start := time.Now() + r0, r1 := m.s.InsertProvisionerJobTimings(ctx, arg) + m.queryLatencies.WithLabelValues("InsertProvisionerJobTimings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertProvisionerKey(ctx context.Context, arg database.InsertProvisionerKeyParams) (database.ProvisionerKey, 
error) { + start := time.Now() + r0, r1 := m.s.InsertProvisionerKey(ctx, arg) + m.queryLatencies.WithLabelValues("InsertProvisionerKey").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) { + start := time.Now() + replica, err := m.s.InsertReplica(ctx, arg) + m.queryLatencies.WithLabelValues("InsertReplica").Observe(time.Since(start).Seconds()) + return replica, err +} + +func (m queryMetricsStore) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { + start := time.Now() + err := m.s.InsertTemplate(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplate").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertTemplateVersion(ctx context.Context, arg database.InsertTemplateVersionParams) error { + start := time.Now() + err := m.s.InsertTemplateVersion(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplateVersion").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertTemplateVersionParameter(ctx context.Context, arg database.InsertTemplateVersionParameterParams) (database.TemplateVersionParameter, error) { + start := time.Now() + parameter, err := m.s.InsertTemplateVersionParameter(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplateVersionParameter").Observe(time.Since(start).Seconds()) + return parameter, err +} + +func (m queryMetricsStore) InsertTemplateVersionVariable(ctx context.Context, arg database.InsertTemplateVersionVariableParams) (database.TemplateVersionVariable, error) { + start := time.Now() + variable, err := m.s.InsertTemplateVersionVariable(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplateVersionVariable").Observe(time.Since(start).Seconds()) + return variable, err +} + +func (m queryMetricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { + start := time.Now() + r0, r1 := m.s.InsertTemplateVersionWorkspaceTag(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplateVersionWorkspaceTag").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { + start := time.Now() + user, err := m.s.InsertUser(ctx, arg) + m.queryLatencies.WithLabelValues("InsertUser").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) InsertUserGroupsByID(ctx context.Context, arg database.InsertUserGroupsByIDParams) ([]uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.InsertUserGroupsByID(ctx, arg) + m.queryLatencies.WithLabelValues("InsertUserGroupsByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertUserGroupsByName(ctx context.Context, arg database.InsertUserGroupsByNameParams) error { + start := time.Now() + err := m.s.InsertUserGroupsByName(ctx, arg) + m.queryLatencies.WithLabelValues("InsertUserGroupsByName").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertUserLink(ctx context.Context, arg database.InsertUserLinkParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.InsertUserLink(ctx, arg) + m.queryLatencies.WithLabelValues("InsertUserLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) InsertWorkspace(ctx 
context.Context, arg database.InsertWorkspaceParams) (database.WorkspaceTable, error) { + start := time.Now() + workspace, err := m.s.InsertWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspace").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) { + start := time.Now() + agent, err := m.s.InsertWorkspaceAgent(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgent").Observe(time.Since(start).Seconds()) + return agent, err +} + +func (m queryMetricsStore) InsertWorkspaceAgentLogSources(ctx context.Context, arg database.InsertWorkspaceAgentLogSourcesParams) ([]database.WorkspaceAgentLogSource, error) { + start := time.Now() + r0, r1 := m.s.InsertWorkspaceAgentLogSources(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogSources").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertWorkspaceAgentLogs(ctx context.Context, arg database.InsertWorkspaceAgentLogsParams) ([]database.WorkspaceAgentLog, error) { + start := time.Now() + r0, r1 := m.s.InsertWorkspaceAgentLogs(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentLogs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertWorkspaceAgentMetadata(ctx context.Context, arg database.InsertWorkspaceAgentMetadataParams) error { + start := time.Now() + err := m.s.InsertWorkspaceAgentMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertWorkspaceAgentScriptTimings(ctx context.Context, arg database.InsertWorkspaceAgentScriptTimingsParams) (database.WorkspaceAgentScriptTiming, error) { + start := time.Now() + r0, r1 := m.s.InsertWorkspaceAgentScriptTimings(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScriptTimings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertWorkspaceAgentScripts(ctx context.Context, arg database.InsertWorkspaceAgentScriptsParams) ([]database.WorkspaceAgentScript, error) { + start := time.Now() + r0, r1 := m.s.InsertWorkspaceAgentScripts(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentScripts").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) InsertWorkspaceAgentStats(ctx context.Context, arg database.InsertWorkspaceAgentStatsParams) error { + start := time.Now() + r0 := m.s.InsertWorkspaceAgentStats(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAgentStats").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) InsertWorkspaceApp(ctx context.Context, arg database.InsertWorkspaceAppParams) (database.WorkspaceApp, error) { + start := time.Now() + app, err := m.s.InsertWorkspaceApp(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceApp").Observe(time.Since(start).Seconds()) + return app, err +} + +func (m queryMetricsStore) InsertWorkspaceAppStats(ctx context.Context, arg database.InsertWorkspaceAppStatsParams) error { + start := time.Now() + r0 := m.s.InsertWorkspaceAppStats(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceAppStats").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) InsertWorkspaceBuild(ctx context.Context, arg database.InsertWorkspaceBuildParams) error { + start := time.Now() + err := 
m.s.InsertWorkspaceBuild(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceBuild").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertWorkspaceBuildParameters(ctx context.Context, arg database.InsertWorkspaceBuildParametersParams) error { + start := time.Now() + err := m.s.InsertWorkspaceBuildParameters(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceBuildParameters").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) InsertWorkspaceProxy(ctx context.Context, arg database.InsertWorkspaceProxyParams) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.InsertWorkspaceProxy(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceProxy").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) InsertWorkspaceResource(ctx context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) { + start := time.Now() + resource, err := m.s.InsertWorkspaceResource(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceResource").Observe(time.Since(start).Seconds()) + return resource, err +} + +func (m queryMetricsStore) InsertWorkspaceResourceMetadata(ctx context.Context, arg database.InsertWorkspaceResourceMetadataParams) ([]database.WorkspaceResourceMetadatum, error) { + start := time.Now() + metadata, err := m.s.InsertWorkspaceResourceMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("InsertWorkspaceResourceMetadata").Observe(time.Since(start).Seconds()) + return metadata, err +} + +func (m queryMetricsStore) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.ListProvisionerKeysByOrganization(ctx, organizationID) + m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]database.ProvisionerKey, error) { + start := time.Now() + r0, r1 := m.s.ListProvisionerKeysByOrganizationExcludeReserved(ctx, organizationID) + m.queryLatencies.WithLabelValues("ListProvisionerKeysByOrganizationExcludeReserved").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { + start := time.Now() + r0, r1 := m.s.ListWorkspaceAgentPortShares(ctx, workspaceID) + m.queryLatencies.WithLabelValues("ListWorkspaceAgentPortShares").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) OrganizationMembers(ctx context.Context, arg database.OrganizationMembersParams) ([]database.OrganizationMembersRow, error) { + start := time.Now() + r0, r1 := m.s.OrganizationMembers(ctx, arg) + m.queryLatencies.WithLabelValues("OrganizationMembers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx context.Context, templateID uuid.UUID) error { + start := time.Now() + r0 := m.s.ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx, templateID) + m.queryLatencies.WithLabelValues("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) RegisterWorkspaceProxy(ctx context.Context, arg 
database.RegisterWorkspaceProxyParams) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.RegisterWorkspaceProxy(ctx, arg) + m.queryLatencies.WithLabelValues("RegisterWorkspaceProxy").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error { + start := time.Now() + r0 := m.s.RemoveUserFromAllGroups(ctx, userID) + m.queryLatencies.WithLabelValues("RemoveUserFromAllGroups").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) RemoveUserFromGroups(ctx context.Context, arg database.RemoveUserFromGroupsParams) ([]uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.RemoveUserFromGroups(ctx, arg) + m.queryLatencies.WithLabelValues("RemoveUserFromGroups").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error { + start := time.Now() + r0 := m.s.RevokeDBCryptKey(ctx, activeKeyDigest) + m.queryLatencies.WithLabelValues("RevokeDBCryptKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) { + start := time.Now() + ok, err := m.s.TryAcquireLock(ctx, pgTryAdvisoryXactLock) + m.queryLatencies.WithLabelValues("TryAcquireLock").Observe(time.Since(start).Seconds()) + return ok, err +} + +func (m queryMetricsStore) UnarchiveTemplateVersion(ctx context.Context, arg database.UnarchiveTemplateVersionParams) error { + start := time.Now() + r0 := m.s.UnarchiveTemplateVersion(ctx, arg) + m.queryLatencies.WithLabelValues("UnarchiveTemplateVersion").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UnfavoriteWorkspace(ctx context.Context, arg uuid.UUID) error { + start := time.Now() + r0 := m.s.UnfavoriteWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("UnfavoriteWorkspace").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKeyByIDParams) error { + start := time.Now() + err := m.s.UpdateAPIKeyByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateAPIKeyByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) { + start := time.Now() + key, err := m.s.UpdateCryptoKeyDeletesAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateCryptoKeyDeletesAt").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) UpdateCustomRole(ctx context.Context, arg database.UpdateCustomRoleParams) (database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.UpdateCustomRole(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateCustomRole").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateExternalAuthLink(ctx context.Context, arg database.UpdateExternalAuthLinkParams) (database.ExternalAuthLink, error) { + start := time.Now() + link, err := m.s.UpdateExternalAuthLink(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateExternalAuthLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) { + start := time.Now() + key, err := m.s.UpdateGitSSHKey(ctx, arg) + 
m.queryLatencies.WithLabelValues("UpdateGitSSHKey").Observe(time.Since(start).Seconds()) + return key, err +} + +func (m queryMetricsStore) UpdateGroupByID(ctx context.Context, arg database.UpdateGroupByIDParams) (database.Group, error) { + start := time.Now() + group, err := m.s.UpdateGroupByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateGroupByID").Observe(time.Since(start).Seconds()) + return group, err +} + +func (m queryMetricsStore) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfter database.UpdateInactiveUsersToDormantParams) ([]database.UpdateInactiveUsersToDormantRow, error) { + start := time.Now() + r0, r1 := m.s.UpdateInactiveUsersToDormant(ctx, lastSeenAfter) + m.queryLatencies.WithLabelValues("UpdateInactiveUsersToDormant").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) { + start := time.Now() + member, err := m.s.UpdateMemberRoles(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateMemberRoles").Observe(time.Since(start).Seconds()) + return member, err +} + +func (m queryMetricsStore) UpdateNotificationTemplateMethodByID(ctx context.Context, arg database.UpdateNotificationTemplateMethodByIDParams) (database.NotificationTemplate, error) { + start := time.Now() + r0, r1 := m.s.UpdateNotificationTemplateMethodByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateNotificationTemplateMethodByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) { + start := time.Now() + r0, r1 := m.s.UpdateOAuth2ProviderAppByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) { + start := time.Now() + r0, r1 := m.s.UpdateOAuth2ProviderAppSecretByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateOAuth2ProviderAppSecretByID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { + start := time.Now() + r0, r1 := m.s.UpdateOrganization(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { + start := time.Now() + r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerDaemonLastSeenAt").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error { + start := time.Now() + err := m.s.UpdateProvisionerJobByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerJobByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { + start := time.Now() + err := m.s.UpdateProvisionerJobWithCancelByID(ctx, arg) + 
m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCancelByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error { + start := time.Now() + err := m.s.UpdateProvisionerJobWithCompleteByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { + start := time.Now() + replica, err := m.s.UpdateReplica(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateReplica").Observe(time.Since(start).Seconds()) + return replica, err +} + +func (m queryMetricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg database.UpdateTailnetPeerStatusByCoordinatorParams) error { + start := time.Now() + r0 := m.s.UpdateTailnetPeerStatusByCoordinator(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTailnetPeerStatusByCoordinator").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateTemplateACLByID(ctx context.Context, arg database.UpdateTemplateACLByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateACLByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateACLByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateAccessControlByID(ctx context.Context, arg database.UpdateTemplateAccessControlByIDParams) error { + start := time.Now() + r0 := m.s.UpdateTemplateAccessControlByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateAccessControlByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateTemplateActiveVersionByID(ctx context.Context, arg database.UpdateTemplateActiveVersionByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateActiveVersionByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateActiveVersionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateDeletedByID(ctx context.Context, arg database.UpdateTemplateDeletedByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateDeletedByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateDeletedByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateMetaByID(ctx context.Context, arg database.UpdateTemplateMetaByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateMetaByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateMetaByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateScheduleByID(ctx context.Context, arg database.UpdateTemplateScheduleByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateScheduleByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateScheduleByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg 
database.UpdateTemplateVersionDescriptionByJobIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionDescriptionByJobID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionDescriptionByJobID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { + start := time.Now() + err := m.s.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionExternalAuthProvidersByJobID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { + start := time.Now() + r0 := m.s.UpdateTemplateWorkspacesLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateWorkspacesLastUsedAt").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserAppearanceSettings(ctx context.Context, arg database.UpdateUserAppearanceSettingsParams) (database.User, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserAppearanceSettings(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserAppearanceSettings").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.UpdateUserDeletedByID(ctx, id) + m.queryLatencies.WithLabelValues("UpdateUserDeletedByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserGithubComUserID(ctx context.Context, arg database.UpdateUserGithubComUserIDParams) error { + start := time.Now() + r0 := m.s.UpdateUserGithubComUserID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserGithubComUserID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserHashedOneTimePasscode(ctx context.Context, arg database.UpdateUserHashedOneTimePasscodeParams) error { + start := time.Now() + r0 := m.s.UpdateUserHashedOneTimePasscode(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserHashedOneTimePasscode").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateUserHashedPassword(ctx context.Context, arg database.UpdateUserHashedPasswordParams) error { + start := time.Now() + err := m.s.UpdateUserHashedPassword(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserHashedPassword").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateUserLastSeenAt(ctx context.Context, arg database.UpdateUserLastSeenAtParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserLastSeenAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLastSeenAt").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserLink(ctx context.Context, arg database.UpdateUserLinkParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.UpdateUserLink(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLink").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { + start := time.Now() + link, err := m.s.UpdateUserLinkedID(ctx, arg) + 
m.queryLatencies.WithLabelValues("UpdateUserLinkedID").Observe(time.Since(start).Seconds()) + return link, err +} + +func (m queryMetricsStore) UpdateUserLoginType(ctx context.Context, arg database.UpdateUserLoginTypeParams) (database.User, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserLoginType(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserLoginType").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserNotificationPreferences(ctx context.Context, arg database.UpdateUserNotificationPreferencesParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserNotificationPreferences(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserNotificationPreferences").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserProfile(ctx context.Context, arg database.UpdateUserProfileParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserProfile(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserProfile").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserQuietHoursSchedule(ctx context.Context, arg database.UpdateUserQuietHoursScheduleParams) (database.User, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserQuietHoursSchedule(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserQuietHoursSchedule").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRolesParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserRoles(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserRoles").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { + start := time.Now() + user, err := m.s.UpdateUserStatus(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserStatus").Observe(time.Since(start).Seconds()) + return user, err +} + +func (m queryMetricsStore) UpdateWorkspace(ctx context.Context, arg database.UpdateWorkspaceParams) (database.WorkspaceTable, error) { + start := time.Now() + workspace, err := m.s.UpdateWorkspace(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspace").Observe(time.Since(start).Seconds()) + return workspace, err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentConnectionByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentConnectionByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg database.UpdateWorkspaceAgentLifecycleStateByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAgentLifecycleStateByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLifecycleStateByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg database.UpdateWorkspaceAgentLogOverflowByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAgentLogOverflowByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentLogOverflowByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) 
UpdateWorkspaceAgentMetadata(ctx context.Context, arg database.UpdateWorkspaceAgentMetadataParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentMetadata(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentMetadata").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg database.UpdateWorkspaceAgentStartupByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAgentStartupByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentStartupByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAppHealthByID(ctx context.Context, arg database.UpdateWorkspaceAppHealthByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAppHealthByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAppHealthByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg database.UpdateWorkspaceAutomaticUpdatesParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceAutomaticUpdates(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAutomaticUpdates").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceAutostart(ctx context.Context, arg database.UpdateWorkspaceAutostartParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceAutostart(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceAutostart").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceBuildCostByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildCostByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceBuildDeadlineByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildDeadlineByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildProvisionerStateByID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceDeletedByID(ctx context.Context, arg database.UpdateWorkspaceDeletedByIDParams) error { + start := time.Now() + err := m.s.UpdateWorkspaceDeletedByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceDeletedByID").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg database.UpdateWorkspaceDormantDeletingAtParams) (database.WorkspaceTable, error) { + start := time.Now() + ws, r0 := m.s.UpdateWorkspaceDormantDeletingAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceDormantDeletingAt").Observe(time.Since(start).Seconds()) + return ws, r0 +} + +func (m queryMetricsStore) UpdateWorkspaceLastUsedAt(ctx context.Context, arg database.UpdateWorkspaceLastUsedAtParams) error { + start := time.Now() + err := 
m.s.UpdateWorkspaceLastUsedAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceLastUsedAt").Observe(time.Since(start).Seconds()) + return err +} + +func (m queryMetricsStore) UpdateWorkspaceProxy(ctx context.Context, arg database.UpdateWorkspaceProxyParams) (database.WorkspaceProxy, error) { + start := time.Now() + proxy, err := m.s.UpdateWorkspaceProxy(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceProxy").Observe(time.Since(start).Seconds()) + return proxy, err +} + +func (m queryMetricsStore) UpdateWorkspaceProxyDeleted(ctx context.Context, arg database.UpdateWorkspaceProxyDeletedParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceProxyDeleted(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceProxyDeleted").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspaceTTL(ctx context.Context, arg database.UpdateWorkspaceTTLParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceTTL(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceTTL").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams) ([]database.WorkspaceTable, error) { + start := time.Now() + r0, r1 := m.s.UpdateWorkspacesDormantDeletingAtByTemplateID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspacesDormantDeletingAtByTemplateID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertAnnouncementBanners(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertAnnouncementBanners(ctx, value) + m.queryLatencies.WithLabelValues("UpsertAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertAppSecurityKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertAppSecurityKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertAppSecurityKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertApplicationName(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertApplicationName(ctx, value) + m.queryLatencies.WithLabelValues("UpsertApplicationName").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertCoordinatorResumeTokenSigningKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertCoordinatorResumeTokenSigningKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { + start := time.Now() + r0 := m.s.UpsertDefaultProxy(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertDefaultProxy").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertHealthSettings(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertHealthSettings(ctx, value) + m.queryLatencies.WithLabelValues("UpsertHealthSettings").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error { + start := time.Now() + r0 := m.s.UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) + 
m.queryLatencies.WithLabelValues("UpsertJFrogXrayScanByWorkspaceAndAgentID").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertLastUpdateCheck(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertLastUpdateCheck(ctx, value) + m.queryLatencies.WithLabelValues("UpsertLastUpdateCheck").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertLogoURL(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertLogoURL(ctx, value) + m.queryLatencies.WithLabelValues("UpsertLogoURL").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error { + start := time.Now() + r0 := m.s.UpsertNotificationReportGeneratorLog(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertNotificationReportGeneratorLog").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertNotificationsSettings(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertNotificationsSettings(ctx, value) + m.queryLatencies.WithLabelValues("UpsertNotificationsSettings").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertOAuthSigningKey(ctx, value) + m.queryLatencies.WithLabelValues("UpsertOAuthSigningKey").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertProvisionerDaemon(ctx context.Context, arg database.UpsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) { + start := time.Now() + r0, r1 := m.s.UpsertProvisionerDaemon(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertProvisionerDaemon").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertRuntimeConfig(ctx context.Context, arg database.UpsertRuntimeConfigParams) error { + start := time.Now() + r0 := m.s.UpsertRuntimeConfig(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertRuntimeConfig").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetAgent(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetAgent").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetClient(ctx context.Context, arg database.UpsertTailnetClientParams) (database.TailnetClient, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetClient(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetClient").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetClientSubscription(ctx context.Context, arg database.UpsertTailnetClientSubscriptionParams) error { + start := time.Now() + r0 := m.s.UpsertTailnetClientSubscription(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetClientSubscription").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (database.TailnetCoordinator, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetCoordinator(ctx, id) + m.queryLatencies.WithLabelValues("UpsertTailnetCoordinator").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m 
queryMetricsStore) UpsertTailnetPeer(ctx context.Context, arg database.UpsertTailnetPeerParams) (database.TailnetPeer, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetPeer(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetPeer").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTailnetTunnelParams) (database.TailnetTunnel, error) { + start := time.Now() + r0, r1 := m.s.UpsertTailnetTunnel(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertTailnetTunnel").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) UpsertTemplateUsageStats(ctx context.Context) error { + start := time.Now() + r0 := m.s.UpsertTemplateUsageStats(ctx) + m.queryLatencies.WithLabelValues("UpsertTemplateUsageStats").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpsertWorkspaceAgentPortShare(ctx context.Context, arg database.UpsertWorkspaceAgentPortShareParams) (database.WorkspaceAgentPortShare, error) { + start := time.Now() + r0, r1 := m.s.UpsertWorkspaceAgentPortShare(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertWorkspaceAgentPortShare").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, prepared rbac.PreparedAuthorized) ([]database.Template, error) { + start := time.Now() + templates, err := m.s.GetAuthorizedTemplates(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedTemplates").Observe(time.Since(start).Seconds()) + return templates, err +} + +func (m queryMetricsStore) GetTemplateGroupRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateGroup, error) { + start := time.Now() + roles, err := m.s.GetTemplateGroupRoles(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateGroupRoles").Observe(time.Since(start).Seconds()) + return roles, err +} + +func (m queryMetricsStore) GetTemplateUserRoles(ctx context.Context, id uuid.UUID) ([]database.TemplateUser, error) { + start := time.Now() + roles, err := m.s.GetTemplateUserRoles(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateUserRoles").Observe(time.Since(start).Seconds()) + return roles, err +} + +func (m queryMetricsStore) GetAuthorizedWorkspaces(ctx context.Context, arg database.GetWorkspacesParams, prepared rbac.PreparedAuthorized) ([]database.GetWorkspacesRow, error) { + start := time.Now() + workspaces, err := m.s.GetAuthorizedWorkspaces(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedWorkspaces").Observe(time.Since(start).Seconds()) + return workspaces, err +} + +func (m queryMetricsStore) GetAuthorizedUsers(ctx context.Context, arg database.GetUsersParams, prepared rbac.PreparedAuthorized) ([]database.GetUsersRow, error) { + start := time.Now() + r0, r1 := m.s.GetAuthorizedUsers(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedUsers").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetAuthorizedAuditLogsOffset(ctx context.Context, arg database.GetAuditLogsOffsetParams, prepared rbac.PreparedAuthorized) ([]database.GetAuditLogsOffsetRow, error) { + start := time.Now() + r0, r1 := m.s.GetAuthorizedAuditLogsOffset(ctx, arg, prepared) + m.queryLatencies.WithLabelValues("GetAuthorizedAuditLogsOffset").Observe(time.Since(start).Seconds()) + return r0, r1 +} diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 
b3c7b9e7615d3..ffc9ab79f777e 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -11,7 +11,6 @@ package dbmock import ( context "context" - sql "database/sql" reflect "reflect" time "time" @@ -3489,7 +3488,7 @@ func (mr *MockStoreMockRecorder) GetWorkspacesEligibleForTransition(arg0, arg1 a } // InTx mocks base method. -func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *sql.TxOptions) error { +func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *database.TxOptions) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "InTx", arg0, arg1) ret0, _ := ret[0].(error) diff --git a/coderd/database/dbpurge/dbpurge.go b/coderd/database/dbpurge/dbpurge.go index 00244cfd63533..e9c22611f1879 100644 --- a/coderd/database/dbpurge/dbpurge.go +++ b/coderd/database/dbpurge/dbpurge.go @@ -66,7 +66,7 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. logger.Info(ctx, "purged old database entries", slog.F("duration", clk.Since(start))) return nil - }, nil); err != nil { + }, database.DefaultTXOptions().WithID("db_purge")); err != nil { logger.Error(ctx, "failed to purge old database entries", slog.Error(err)) return } diff --git a/coderd/database/dbrollup/dbrollup.go b/coderd/database/dbrollup/dbrollup.go index 36eddc41fc544..c6b61c587580e 100644 --- a/coderd/database/dbrollup/dbrollup.go +++ b/coderd/database/dbrollup/dbrollup.go @@ -108,7 +108,7 @@ func (r *Rolluper) start(ctx context.Context) { ev.TemplateUsageStats = true return tx.UpsertTemplateUsageStats(ctx) - }, nil) + }, database.DefaultTXOptions().WithID("db_rollup")) }) err := eg.Wait() diff --git a/coderd/database/dbrollup/dbrollup_test.go b/coderd/database/dbrollup/dbrollup_test.go index 0c32ddc9a9c9a..6d541dd66969b 100644 --- a/coderd/database/dbrollup/dbrollup_test.go +++ b/coderd/database/dbrollup/dbrollup_test.go @@ -38,7 +38,7 @@ type wrapUpsertDB struct { resume <-chan struct{} } -func (w *wrapUpsertDB) InTx(fn func(database.Store) error, opts *sql.TxOptions) error { +func (w *wrapUpsertDB) InTx(fn func(database.Store) error, opts *database.TxOptions) error { return w.Store.InTx(func(tx database.Store) error { return fn(&wrapUpsertDB{Store: tx, resume: w.resume}) }, opts) diff --git a/coderd/database/tx.go b/coderd/database/tx.go index 43da15f3f058c..32a25753513ed 100644 --- a/coderd/database/tx.go +++ b/coderd/database/tx.go @@ -33,7 +33,7 @@ func ReadModifyUpdate(db Store, f func(tx Store) error, ) error { var err error for retries := 0; retries < maxRetries; retries++ { - err = db.InTx(f, &sql.TxOptions{ + err = db.InTx(f, &TxOptions{ Isolation: sql.LevelRepeatableRead, }) var pqe *pq.Error diff --git a/coderd/database/tx_test.go b/coderd/database/tx_test.go index d97c1bc26d57f..5f051085188ca 100644 --- a/coderd/database/tx_test.go +++ b/coderd/database/tx_test.go @@ -19,7 +19,7 @@ func TestReadModifyUpdate_OK(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). Return(nil) err := database.ReadModifyUpdate(mDB, func(tx database.Store) error { @@ -34,11 +34,11 @@ func TestReadModifyUpdate_RetryOK(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) firstUpdate := mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). 
Return(&pq.Error{Code: pq.ErrorCode("40001")}) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). After(firstUpdate). Times(1). Return(nil) @@ -55,7 +55,7 @@ func TestReadModifyUpdate_HardError(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(1). Return(xerrors.New("a bad thing happened")) @@ -71,7 +71,7 @@ func TestReadModifyUpdate_TooManyRetries(t *testing.T) { mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT(). - InTx(gomock.Any(), &sql.TxOptions{Isolation: sql.LevelRepeatableRead}). + InTx(gomock.Any(), &database.TxOptions{Isolation: sql.LevelRepeatableRead}). Times(5). Return(&pq.Error{Code: pq.ErrorCode("40001")}) err := database.ReadModifyUpdate(mDB, func(tx database.Store) error { diff --git a/coderd/idpsync/role_test.go b/coderd/idpsync/role_test.go index c6ab989881976..45e9edd6c1dd4 100644 --- a/coderd/idpsync/role_test.go +++ b/coderd/idpsync/role_test.go @@ -2,7 +2,6 @@ package idpsync_test import ( "context" - "database/sql" "encoding/json" "testing" @@ -324,7 +323,7 @@ func TestNoopNoDiff(t *testing.T) { // and 'UpdateMemberRoles'. mDB.EXPECT().InTx( gomock.Any(), gomock.Any(), - ).DoAndReturn(func(f func(database.Store) error, _ *sql.TxOptions) error { + ).DoAndReturn(func(f func(database.Store) error, _ *database.TxOptions) error { err := f(mDB) return err }) diff --git a/coderd/promoauth/oauth2_test.go b/coderd/promoauth/oauth2_test.go index e54608385ccfe..9e31d90944f36 100644 --- a/coderd/promoauth/oauth2_test.go +++ b/coderd/promoauth/oauth2_test.go @@ -3,24 +3,19 @@ package promoauth_test import ( "context" "fmt" - "io" "net/http" - "net/http/httptest" "net/url" "strings" "testing" "time" "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" - ptestutil "github.com/prometheus/client_golang/prometheus/testutil" - io_prometheus_client "github.com/prometheus/client_model/go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "golang.org/x/exp/maps" "golang.org/x/oauth2" "github.com/coder/coder/v2/coderd/coderdtest/oidctest" + "github.com/coder/coder/v2/coderd/coderdtest/promhelp" "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/testutil" @@ -34,7 +29,7 @@ func TestInstrument(t *testing.T) { reg := prometheus.NewRegistry() t.Cleanup(func() { if t.Failed() { - t.Log(registryDump(reg)) + t.Log(promhelp.RegistryDump(reg)) } }) @@ -46,7 +41,7 @@ func TestInstrument(t *testing.T) { const metricname = "coderd_oauth2_external_requests_total" count := func(source string) int { labels["source"] = source - return counterValue(t, reg, "coderd_oauth2_external_requests_total", labels) + return promhelp.CounterValue(t, reg, "coderd_oauth2_external_requests_total", labels) } factory := promoauth.NewFactory(reg) @@ -58,7 +53,7 @@ func TestInstrument(t *testing.T) { } // 0 Requests before we start - require.Nil(t, metricValue(t, reg, metricname, labels), "no metrics at start") + require.Nil(t, promhelp.MetricValue(t, reg, metricname, labels), "no metrics at start") noClientCtx := ctx // This should never be done, but promoauth should not break the default client @@ -94,7 +89,7 @@ func TestInstrument(t *testing.T) { // Verify 
the default client was not broken. This check is added because we // extend the http.DefaultTransport. If a `.Clone()` is not done, this can be // mis-used. It is cheap to run this quick check. - snapshot := registryDump(reg) + snapshot := promhelp.RegistryDump(reg) req, err := http.NewRequestWithContext(ctx, http.MethodGet, must[*url.URL](t)(idp.IssuerURL().Parse("/.well-known/openid-configuration")).String(), nil) require.NoError(t, err) @@ -103,7 +98,7 @@ func TestInstrument(t *testing.T) { require.NoError(t, err) _ = resp.Body.Close() - require.NoError(t, compare(reg, snapshot), "http default client corrupted") + require.NoError(t, promhelp.Compare(reg, snapshot), "http default client corrupted") } func TestGithubRateLimits(t *testing.T) { @@ -214,37 +209,26 @@ func TestGithubRateLimits(t *testing.T) { } pass := true if !c.ExpectNoMetrics { - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), c.Limit, "limit") - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_remaining", labels), c.Remaining, "remaining") - pass = pass && assert.Equal(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_used", labels), c.Used, "used") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), c.Limit, "limit") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_remaining", labels), c.Remaining, "remaining") + pass = pass && assert.Equal(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_used", labels), c.Used, "used") if !c.at.IsZero() { until := c.Reset.Sub(c.at) // Float accuracy is not great, so we allow a delta of 2 - pass = pass && assert.InDelta(t, gaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_reset_in_seconds", labels), int(until.Seconds()), 2, "reset in") + pass = pass && assert.InDelta(t, promhelp.GaugeValue(t, reg, "coderd_oauth2_external_requests_rate_limit_reset_in_seconds", labels), int(until.Seconds()), 2, "reset in") } } else { - pass = pass && assert.Nil(t, metricValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), "not exists") + pass = pass && assert.Nil(t, promhelp.MetricValue(t, reg, "coderd_oauth2_external_requests_rate_limit_total", labels), "not exists") } // Helpful debugging if !pass { - t.Log(registryDump(reg)) + t.Log(promhelp.RegistryDump(reg)) } }) } } -func registryDump(reg *prometheus.Registry) string { - h := promhttp.HandlerFor(reg, promhttp.HandlerOpts{}) - rec := httptest.NewRecorder() - req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "/", nil) - h.ServeHTTP(rec, req) - resp := rec.Result() - data, _ := io.ReadAll(resp.Body) - _ = resp.Body.Close() - return string(data) -} - func must[V any](t *testing.T) func(v V, err error) V { return func(v V, err error) V { t.Helper() @@ -252,39 +236,3 @@ func must[V any](t *testing.T) func(v V, err error) V { return v } } - -func gaugeValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { - labeled := metricValue(t, reg, metricName, labels) - require.NotNilf(t, labeled, "metric %q with labels %v not found", metricName, labels) - return int(labeled.GetGauge().GetValue()) -} - -func counterValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) int { - labeled := metricValue(t, reg, metricName, labels) - require.NotNilf(t, labeled, "metric 
%q with labels %v not found", metricName, labels) - return int(labeled.GetCounter().GetValue()) -} - -func compare(reg prometheus.Gatherer, compare string) error { - return ptestutil.GatherAndCompare(reg, strings.NewReader(compare)) -} - -func metricValue(t testing.TB, reg prometheus.Gatherer, metricName string, labels prometheus.Labels) *io_prometheus_client.Metric { - metrics, err := reg.Gather() - require.NoError(t, err) - - for _, m := range metrics { - if m.GetName() == metricName { - for _, labeled := range m.GetMetric() { - mLables := make(prometheus.Labels) - for _, v := range labeled.GetLabel() { - mLables[v.GetName()] = v.GetValue() - } - if maps.Equal(mLables, labels) { - return labeled - } - } - } - } - return nil -} diff --git a/coderd/templates.go b/coderd/templates.go index cbc6eb784d2e4..de47b5225a973 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -467,7 +467,7 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque templateVersionAudit.New = newTemplateVersion return nil - }, nil) + }, database.DefaultTXOptions().WithID("postTemplate")) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error inserting template.", diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index ad53cd7d45609..dd532467bbc92 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -735,9 +735,9 @@ func expectDB(t *testing.T, opts ...txExpect) *dbmock.MockStore { // we expect to be run in a transaction; we use mTx to record the // "in transaction" calls. mDB.EXPECT().InTx( - gomock.Any(), gomock.Eq(&sql.TxOptions{Isolation: sql.LevelRepeatableRead}), + gomock.Any(), gomock.Eq(&database.TxOptions{Isolation: sql.LevelRepeatableRead}), ). - DoAndReturn(func(f func(database.Store) error, _ *sql.TxOptions) error { + DoAndReturn(func(f func(database.Store) error, _ *database.TxOptions) error { err := f(mTx) return err }) @@ -763,7 +763,7 @@ func withTemplate(mTx *dbmock.MockStore) { // withInTx runs the given functions on the same db mock. func withInTx(mTx *dbmock.MockStore) { mTx.EXPECT().InTx(gomock.Any(), gomock.Any()).Times(1).DoAndReturn( - func(f func(store database.Store) error, _ *sql.TxOptions) error { + func(f func(store database.Store) error, _ *database.TxOptions) error { return f(mTx) }, ) diff --git a/codersdk/deployment.go b/codersdk/deployment.go index d6840df504b85..6394deb000d52 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -1357,14 +1357,18 @@ when required by your organization's security policy.`, Default: strings.Join(agentmetrics.LabelAll, ","), }, { - Name: "Prometheus Collect Database Metrics", - Description: "Collect database metrics (may increase charges for metrics storage).", - Flag: "prometheus-collect-db-metrics", - Env: "CODER_PROMETHEUS_COLLECT_DB_METRICS", - Value: &c.Prometheus.CollectDBMetrics, - Group: &deploymentGroupIntrospectionPrometheus, - YAML: "collect_db_metrics", - Default: "false", + Name: "Prometheus Collect Database Metrics", + // Some db metrics like transaction information will still be collected. + // Query metrics blow up the number of unique time series with labels + // and can be very expensive. So default to not capturing query metrics. + Description: "Collect database query metrics (may increase charges for metrics storage). 
" + + "If set to false, a reduced set of database metrics are still collected.", + Flag: "prometheus-collect-db-metrics", + Env: "CODER_PROMETHEUS_COLLECT_DB_METRICS", + Value: &c.Prometheus.CollectDBMetrics, + Group: &deploymentGroupIntrospectionPrometheus, + YAML: "collect_db_metrics", + Default: "false", }, // Pprof settings { diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md index 17906465d2e3f..981c2419cf903 100644 --- a/docs/reference/cli/server.md +++ b/docs/reference/cli/server.md @@ -321,7 +321,7 @@ When collecting agent stats, aggregate metrics by a given set of comma-separated | YAML | introspection.prometheus.collect_db_metrics | | Default | false | -Collect database metrics (may increase charges for metrics storage). +Collect database query metrics (may increase charges for metrics storage). If set to false, a reduced set of database metrics are still collected. ### --pprof-enable diff --git a/enterprise/cli/testdata/coder_server_--help.golden b/enterprise/cli/testdata/coder_server_--help.golden index 95c0c957d80f6..b637a0da3f74d 100644 --- a/enterprise/cli/testdata/coder_server_--help.golden +++ b/enterprise/cli/testdata/coder_server_--help.golden @@ -146,7 +146,9 @@ INTROSPECTION / PROMETHEUS OPTIONS: Collect agent stats (may increase charges for metrics storage). --prometheus-collect-db-metrics bool, $CODER_PROMETHEUS_COLLECT_DB_METRICS (default: false) - Collect database metrics (may increase charges for metrics storage). + Collect database query metrics (may increase charges for metrics + storage). If set to false, a reduced set of database metrics are still + collected. --prometheus-enable bool, $CODER_PROMETHEUS_ENABLE Serve prometheus metrics on the address defined by prometheus address. diff --git a/enterprise/coderd/workspacequota.go b/enterprise/coderd/workspacequota.go index 8178f6304a947..7ea42ea24f491 100644 --- a/enterprise/coderd/workspacequota.go +++ b/enterprise/coderd/workspacequota.go @@ -104,8 +104,9 @@ func (c *committer) CommitQuota( permit = true consumed = newConsumed return nil - }, &sql.TxOptions{ - Isolation: sql.LevelSerializable, + }, &database.TxOptions{ + Isolation: sql.LevelSerializable, + TxIdentifier: "commit_quota", }) if err != nil { return nil, err diff --git a/enterprise/dbcrypt/cliutil.go b/enterprise/dbcrypt/cliutil.go index 4d8e7e7b0340f..47045f9bfefab 100644 --- a/enterprise/dbcrypt/cliutil.go +++ b/enterprise/dbcrypt/cliutil.go @@ -73,7 +73,7 @@ func Rotate(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciphe } } return nil - }, &sql.TxOptions{ + }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, }) if err != nil { @@ -163,7 +163,7 @@ func Decrypt(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciph } } return nil - }, &sql.TxOptions{ + }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, }) if err != nil { diff --git a/enterprise/dbcrypt/dbcrypt.go b/enterprise/dbcrypt/dbcrypt.go index 979a8ad137e6d..77a7d5cb78738 100644 --- a/enterprise/dbcrypt/dbcrypt.go +++ b/enterprise/dbcrypt/dbcrypt.go @@ -60,7 +60,7 @@ type dbCrypt struct { database.Store } -func (db *dbCrypt) InTx(function func(database.Store) error, txOpts *sql.TxOptions) error { +func (db *dbCrypt) InTx(function func(database.Store) error, txOpts *database.TxOptions) error { return db.Store.InTx(func(s database.Store) error { return function(&dbCrypt{ primaryCipherDigest: db.primaryCipherDigest, @@ -445,5 +445,5 @@ func (db *dbCrypt) ensureEncrypted(ctx context.Context) error { ActiveKeyDigest: 
db.primaryCipherDigest, Test: testValue, }) - }, &sql.TxOptions{Isolation: sql.LevelRepeatableRead}) + }, &database.TxOptions{Isolation: sql.LevelRepeatableRead}) } diff --git a/enterprise/dbcrypt/dbcrypt_internal_test.go b/enterprise/dbcrypt/dbcrypt_internal_test.go index 432dc90061677..a480fa08930f5 100644 --- a/enterprise/dbcrypt/dbcrypt_internal_test.go +++ b/enterprise/dbcrypt/dbcrypt_internal_test.go @@ -773,7 +773,7 @@ func TestEncryptDecryptField(t *testing.T) { func expectInTx(mdb *dbmock.MockStore) *gomock.Call { return mdb.EXPECT().InTx(gomock.Any(), gomock.Any()).Times(1).DoAndReturn( - func(f func(store database.Store) error, _ *sql.TxOptions) error { + func(f func(store database.Store) error, _ *database.TxOptions) error { return f(mdb) }, ) diff --git a/scripts/dbgen/main.go b/scripts/dbgen/main.go index 54b104d04f718..4ec08920e9741 100644 --- a/scripts/dbgen/main.go +++ b/scripts/dbgen/main.go @@ -60,7 +60,7 @@ func run() error { return xerrors.Errorf("stub dbmem: %w", err) } - err = orderAndStubDatabaseFunctions(filepath.Join(databasePath, "dbmetrics", "dbmetrics.go"), "m", "metricsStore", func(params stubParams) string { + err = orderAndStubDatabaseFunctions(filepath.Join(databasePath, "dbmetrics", "querymetrics.go"), "m", "queryMetricsStore", func(params stubParams) string { return fmt.Sprintf(` start := time.Now() %s := m.s.%s(%s) From cd890aa3a0fac2db6d0abcda8b303217d435192a Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Fri, 25 Oct 2024 17:14:35 +0100 Subject: [PATCH 02/42] feat: enable key rotation (#15066) This PR contains the remaining logic necessary to hook up key rotation to the product. --- cli/server.go | 102 ++---- coderd/apidoc/docs.go | 18 +- coderd/apidoc/swagger.json | 22 +- coderd/coderd.go | 72 ++++- coderd/coderdtest/coderdtest.go | 16 +- coderd/cryptokeys/cache.go | 92 ++++-- coderd/cryptokeys/cache_test.go | 2 +- coderd/cryptokeys/rotate.go | 23 +- coderd/cryptokeys/rotate_internal_test.go | 81 ++--- coderd/cryptokeys/rotate_test.go | 10 +- coderd/database/dbauthz/dbauthz.go | 46 +++ coderd/database/dbauthz/dbauthz_test.go | 14 +- coderd/database/dbgen/dbgen.go | 8 +- coderd/database/dump.sql | 3 +- .../000271_cryptokey_features.down.sql | 18 ++ .../000271_cryptokey_features.up.sql | 18 ++ .../fixtures/000271_cryptokey_features.up.sql | 40 +++ coderd/database/models.go | 17 +- coderd/database/sqlc.yaml | 2 + coderd/jwtutils/jwe.go | 8 +- coderd/jwtutils/jws.go | 62 +++- coderd/jwtutils/jwt_test.go | 10 +- coderd/userauth.go | 40 +-- coderd/userauth_test.go | 124 +++++++- coderd/workspaceagents.go | 14 +- coderd/workspaceagents_test.go | 193 ++++++++---- coderd/workspaceapps.go | 8 +- coderd/workspaceapps/apptest/apptest.go | 214 ++++++++++++- coderd/workspaceapps/db.go | 28 +- coderd/workspaceapps/db_test.go | 28 +- coderd/workspaceapps/provider.go | 4 +- coderd/workspaceapps/proxy.go | 15 +- coderd/workspaceapps/request.go | 4 +- coderd/workspaceapps/request_test.go | 2 +- coderd/workspaceapps/token.go | 211 ++----------- coderd/workspaceapps/token_test.go | 293 ++---------------- coderd/workspaceapps_test.go | 41 ++- codersdk/deployment.go | 8 +- .../workspacesdk/connector_internal_test.go | 21 +- docs/reference/api/schemas.md | 19 +- enterprise/coderd/coderdenttest/proxytest.go | 6 +- enterprise/coderd/workspaceproxy.go | 27 +- enterprise/coderd/workspaceproxy_test.go | 97 ++++-- enterprise/dbcrypt/dbcrypt_internal_test.go | 10 +- enterprise/workspaceapps_test.go | 6 + enterprise/wsproxy/keyfetcher.go | 7 +- enterprise/wsproxy/tokenprovider.go | 
16 +- enterprise/wsproxy/wsproxy.go | 79 +++-- enterprise/wsproxy/wsproxy_test.go | 26 ++ enterprise/wsproxy/wsproxysdk/wsproxysdk.go | 13 +- site/src/api/typesGenerated.ts | 4 +- tailnet/resume.go | 143 ++------- tailnet/resume_test.go | 150 ++------- tailnet/service.go | 2 +- 54 files changed, 1410 insertions(+), 1127 deletions(-) create mode 100644 coderd/database/migrations/000271_cryptokey_features.down.sql create mode 100644 coderd/database/migrations/000271_cryptokey_features.up.sql create mode 100644 coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql diff --git a/cli/server.go b/cli/server.go index 2cf2d95a065e9..b29b39b05fb4a 100644 --- a/cli/server.go +++ b/cli/server.go @@ -10,7 +10,6 @@ import ( "crypto/tls" "crypto/x509" "database/sql" - "encoding/hex" "errors" "flag" "fmt" @@ -62,6 +61,7 @@ import ( "github.com/coder/serpent" "github.com/coder/wgtunnel/tunnelsdk" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/entitlements" "github.com/coder/coder/v2/coderd/notifications/reports" "github.com/coder/coder/v2/coderd/runtimeconfig" @@ -97,7 +97,6 @@ import ( "github.com/coder/coder/v2/coderd/updatecheck" "github.com/coder/coder/v2/coderd/util/slice" stringutil "github.com/coder/coder/v2/coderd/util/strings" - "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" @@ -743,90 +742,31 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. return xerrors.Errorf("set deployment id: %w", err) } } - - // Read the app signing key from the DB. We store it hex encoded - // since the config table uses strings for the value and we - // don't want to deal with automatic encoding issues. - appSecurityKeyStr, err := tx.GetAppSecurityKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return xerrors.Errorf("get app signing key: %w", err) - } - // If the string in the DB is an invalid hex string or the - // length is not equal to the current key length, generate a new - // one. - // - // If the key is regenerated, old signed tokens and encrypted - // strings will become invalid. New signed app tokens will be - // generated automatically on failure. Any workspace app token - // smuggling operations in progress may fail, although with a - // helpful error. - if decoded, err := hex.DecodeString(appSecurityKeyStr); err != nil || len(decoded) != len(workspaceapps.SecurityKey{}) { - b := make([]byte, len(workspaceapps.SecurityKey{})) - _, err := rand.Read(b) - if err != nil { - return xerrors.Errorf("generate fresh app signing key: %w", err) - } - - appSecurityKeyStr = hex.EncodeToString(b) - err = tx.UpsertAppSecurityKey(ctx, appSecurityKeyStr) - if err != nil { - return xerrors.Errorf("insert freshly generated app signing key to database: %w", err) - } - } - - appSecurityKey, err := workspaceapps.KeyFromString(appSecurityKeyStr) - if err != nil { - return xerrors.Errorf("decode app signing key from database: %w", err) - } - - options.AppSecurityKey = appSecurityKey - - // Read the oauth signing key from the database. Like the app security, generate a new one - // if it is invalid for any reason. 
- oauthSigningKeyStr, err := tx.GetOAuthSigningKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return xerrors.Errorf("get app oauth signing key: %w", err) - } - if decoded, err := hex.DecodeString(oauthSigningKeyStr); err != nil || len(decoded) != len(options.OAuthSigningKey) { - b := make([]byte, len(options.OAuthSigningKey)) - _, err := rand.Read(b) - if err != nil { - return xerrors.Errorf("generate fresh oauth signing key: %w", err) - } - - oauthSigningKeyStr = hex.EncodeToString(b) - err = tx.UpsertOAuthSigningKey(ctx, oauthSigningKeyStr) - if err != nil { - return xerrors.Errorf("insert freshly generated oauth signing key to database: %w", err) - } - } - - oauthKeyBytes, err := hex.DecodeString(oauthSigningKeyStr) - if err != nil { - return xerrors.Errorf("decode oauth signing key from database: %w", err) - } - if len(oauthKeyBytes) != len(options.OAuthSigningKey) { - return xerrors.Errorf("oauth signing key in database is not the correct length, expect %d got %d", len(options.OAuthSigningKey), len(oauthKeyBytes)) - } - copy(options.OAuthSigningKey[:], oauthKeyBytes) - if options.OAuthSigningKey == [32]byte{} { - return xerrors.Errorf("oauth signing key in database is empty") - } - - // Read the coordinator resume token signing key from the - // database. - resumeTokenKey, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, tx) - if err != nil { - return xerrors.Errorf("get coordinator resume token key from database: %w", err) - } - options.CoordinatorResumeTokenProvider = tailnet.NewResumeTokenKeyProvider(resumeTokenKey, quartz.NewReal(), tailnet.DefaultResumeTokenExpiry) - return nil }, nil) if err != nil { - return err + return xerrors.Errorf("set deployment id: %w", err) + } + + fetcher := &cryptokeys.DBFetcher{ + DB: options.Database, + } + + resumeKeycache, err := cryptokeys.NewSigningCache(ctx, + logger, + fetcher, + codersdk.CryptoKeyFeatureTailnetResume, + ) + if err != nil { + logger.Critical(ctx, "failed to properly instantiate tailnet resume signing cache", slog.Error(err)) } + options.CoordinatorResumeTokenProvider = tailnet.NewResumeTokenKeyProvider( + resumeKeycache, + quartz.NewReal(), + tailnet.DefaultResumeTokenExpiry, + ) + options.RuntimeConfig = runtimeconfig.NewManager() // This should be output before the logs start streaming. 
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 76084b1ff54dd..09f070046066a 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -7646,6 +7646,15 @@ const docTemplate = `{ ], "summary": "Get workspace proxy crypto keys", "operationId": "get-workspace-proxy-crypto-keys", + "parameters": [ + { + "type": "string", + "description": "Feature key", + "name": "feature", + "in": "query", + "required": true + } + ], "responses": { "200": { "description": "OK", @@ -10011,12 +10020,14 @@ const docTemplate = `{ "codersdk.CryptoKeyFeature": { "type": "string", "enum": [ - "workspace_apps", + "workspace_apps_api_key", + "workspace_apps_token", "oidc_convert", "tailnet_resume" ], "x-enum-varnames": [ - "CryptoKeyFeatureWorkspaceApp", + "CryptoKeyFeatureWorkspaceAppsAPIKey", + "CryptoKeyFeatureWorkspaceAppsToken", "CryptoKeyFeatureOIDCConvert", "CryptoKeyFeatureTailnetResume" ] @@ -16244,9 +16255,6 @@ const docTemplate = `{ "wsproxysdk.RegisterWorkspaceProxyResponse": { "type": "object", "properties": { - "app_security_key": { - "type": "string" - }, "derp_force_websockets": { "type": "boolean" }, diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index beff69ca22373..42b34d576509a 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -6758,6 +6758,15 @@ "tags": ["Enterprise"], "summary": "Get workspace proxy crypto keys", "operationId": "get-workspace-proxy-crypto-keys", + "parameters": [ + { + "type": "string", + "description": "Feature key", + "name": "feature", + "in": "query", + "required": true + } + ], "responses": { "200": { "description": "OK", @@ -8914,9 +8923,15 @@ }, "codersdk.CryptoKeyFeature": { "type": "string", - "enum": ["workspace_apps", "oidc_convert", "tailnet_resume"], + "enum": [ + "workspace_apps_api_key", + "workspace_apps_token", + "oidc_convert", + "tailnet_resume" + ], "x-enum-varnames": [ - "CryptoKeyFeatureWorkspaceApp", + "CryptoKeyFeatureWorkspaceAppsAPIKey", + "CryptoKeyFeatureWorkspaceAppsToken", "CryptoKeyFeatureOIDCConvert", "CryptoKeyFeatureTailnetResume" ] @@ -14853,9 +14868,6 @@ "wsproxysdk.RegisterWorkspaceProxyResponse": { "type": "object", "properties": { - "app_security_key": { - "type": "string" - }, "derp_force_websockets": { "type": "boolean" }, diff --git a/coderd/coderd.go b/coderd/coderd.go index cb0884808ef27..3011c2d58d39c 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -40,6 +40,7 @@ import ( "github.com/coder/quartz" "github.com/coder/serpent" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/entitlements" "github.com/coder/coder/v2/coderd/idpsync" "github.com/coder/coder/v2/coderd/runtimeconfig" @@ -185,9 +186,6 @@ type Options struct { TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] UserQuietHoursScheduleStore *atomic.Pointer[schedule.UserQuietHoursScheduleStore] AccessControlStore *atomic.Pointer[dbauthz.AccessControlStore] - // AppSecurityKey is the crypto key used to sign and encrypt tokens related to - // workspace applications. It consists of both a signing and encryption key. - AppSecurityKey workspaceapps.SecurityKey // CoordinatorResumeTokenProvider is used to provide and validate resume // tokens issued by and passed to the coordinator DRPC API. CoordinatorResumeTokenProvider tailnet.ResumeTokenProvider @@ -251,6 +249,12 @@ type Options struct { // OneTimePasscodeValidityPeriod specifies how long a one time passcode should be valid for. 
OneTimePasscodeValidityPeriod time.Duration + + // Keycaches + AppSigningKeyCache cryptokeys.SigningKeycache + AppEncryptionKeyCache cryptokeys.EncryptionKeycache + OIDCConvertKeyCache cryptokeys.SigningKeycache + Clock quartz.Clock } // @title Coder API @@ -352,6 +356,9 @@ func New(options *Options) *API { if options.PrometheusRegistry == nil { options.PrometheusRegistry = prometheus.NewRegistry() } + if options.Clock == nil { + options.Clock = quartz.NewReal() + } if options.DERPServer == nil && options.DeploymentValues.DERP.Server.Enable { options.DERPServer = derp.NewServer(key.NewNode(), tailnet.Logger(options.Logger.Named("derp"))) } @@ -444,6 +451,49 @@ func New(options *Options) *API { if err != nil { panic(xerrors.Errorf("get deployment ID: %w", err)) } + + fetcher := &cryptokeys.DBFetcher{ + DB: options.Database, + } + + if options.OIDCConvertKeyCache == nil { + options.OIDCConvertKeyCache, err = cryptokeys.NewSigningCache(ctx, + options.Logger.Named("oidc_convert_keycache"), + fetcher, + codersdk.CryptoKeyFeatureOIDCConvert, + ) + if err != nil { + options.Logger.Critical(ctx, "failed to properly instantiate oidc convert signing cache", slog.Error(err)) + } + } + + if options.AppSigningKeyCache == nil { + options.AppSigningKeyCache, err = cryptokeys.NewSigningCache(ctx, + options.Logger.Named("app_signing_keycache"), + fetcher, + codersdk.CryptoKeyFeatureWorkspaceAppsToken, + ) + if err != nil { + options.Logger.Critical(ctx, "failed to properly instantiate app signing key cache", slog.Error(err)) + } + } + + if options.AppEncryptionKeyCache == nil { + options.AppEncryptionKeyCache, err = cryptokeys.NewEncryptionCache(ctx, + options.Logger, + fetcher, + codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey, + ) + if err != nil { + options.Logger.Critical(ctx, "failed to properly instantiate app encryption key cache", slog.Error(err)) + } + } + + // Start a background process that rotates keys. We intentionally start this after the caches + // are created to force initial requests for a key to populate the caches. This helps catch + // bugs that may only occur when a key isn't precached in tests and the latency cost is minimal. 
+ cryptokeys.StartRotator(ctx, options.Logger, options.Database) + api := &API{ ctx: ctx, cancel: cancel, @@ -464,7 +514,7 @@ func New(options *Options) *API { options.DeploymentValues, oauthConfigs, options.AgentInactiveDisconnectTimeout, - options.AppSecurityKey, + options.AppSigningKeyCache, ), metricsCache: metricsCache, Auditor: atomic.Pointer[audit.Auditor]{}, @@ -606,7 +656,7 @@ func New(options *Options) *API { ResumeTokenProvider: api.Options.CoordinatorResumeTokenProvider, }) if err != nil { - api.Logger.Fatal(api.ctx, "failed to initialize tailnet client service", slog.Error(err)) + api.Logger.Fatal(context.Background(), "failed to initialize tailnet client service", slog.Error(err)) } api.statsReporter = workspacestats.NewReporter(workspacestats.ReporterOptions{ @@ -628,9 +678,6 @@ func New(options *Options) *API { options.WorkspaceAppsStatsCollectorOptions.Reporter = api.statsReporter } - if options.AppSecurityKey.IsZero() { - api.Logger.Fatal(api.ctx, "app security key cannot be zero") - } api.workspaceAppServer = &workspaceapps.Server{ Logger: workspaceAppsLogger, @@ -642,11 +689,11 @@ func New(options *Options) *API { SignedTokenProvider: api.WorkspaceAppsProvider, AgentProvider: api.agentProvider, - AppSecurityKey: options.AppSecurityKey, StatsCollector: workspaceapps.NewStatsCollector(options.WorkspaceAppsStatsCollectorOptions), - DisablePathApps: options.DeploymentValues.DisablePathApps.Value(), - SecureAuthCookie: options.DeploymentValues.SecureAuthCookie.Value(), + DisablePathApps: options.DeploymentValues.DisablePathApps.Value(), + SecureAuthCookie: options.DeploymentValues.SecureAuthCookie.Value(), + APIKeyEncryptionKeycache: options.AppEncryptionKeyCache, } apiKeyMiddleware := httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ @@ -1434,6 +1481,9 @@ func (api *API) Close() error { _ = api.agentProvider.Close() _ = api.statsReporter.Close() _ = api.NetworkTelemetryBatcher.Close() + _ = api.OIDCConvertKeyCache.Close() + _ = api.AppSigningKeyCache.Close() + _ = api.AppEncryptionKeyCache.Close() return nil } diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 05c31f35bd20a..d94a6fbe93c4e 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -55,6 +55,7 @@ import ( "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/autobuild" "github.com/coder/coder/v2/coderd/awsidentity" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/database/dbauthz" @@ -88,12 +89,9 @@ import ( sdkproto "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) -// AppSecurityKey is a 96-byte key used to sign JWTs and encrypt JWEs for -// workspace app tokens in tests. -var AppSecurityKey = must(workspaceapps.KeyFromString("6465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e207761732068657265206465616e2077617320686572")) - type Options struct { // AccessURL denotes a custom access URL. By default we use the httptest // server's URL. 
Setting this may result in unexpected behavior (especially @@ -161,8 +159,10 @@ type Options struct { DatabaseRolluper *dbrollup.Rolluper WorkspaceUsageTrackerFlush chan int WorkspaceUsageTrackerTick chan time.Time - - NotificationsEnqueuer notifications.Enqueuer + NotificationsEnqueuer notifications.Enqueuer + APIKeyEncryptionCache cryptokeys.EncryptionKeycache + OIDCConvertKeyCache cryptokeys.SigningKeycache + Clock quartz.Clock } // New constructs a codersdk client connected to an in-memory API instance. @@ -525,7 +525,6 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can DeploymentOptions: codersdk.DeploymentOptionsWithoutSecrets(options.DeploymentValues.Options()), UpdateCheckOptions: options.UpdateCheckOptions, SwaggerEndpoint: options.SwaggerEndpoint, - AppSecurityKey: AppSecurityKey, SSHConfig: options.ConfigSSH, HealthcheckFunc: options.HealthcheckFunc, HealthcheckTimeout: options.HealthcheckTimeout, @@ -538,6 +537,9 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can WorkspaceUsageTracker: wuTracker, NotificationsEnqueuer: options.NotificationsEnqueuer, OneTimePasscodeValidityPeriod: options.OneTimePasscodeValidityPeriod, + Clock: options.Clock, + AppEncryptionKeyCache: options.APIKeyEncryptionCache, + OIDCConvertKeyCache: options.OIDCConvertKeyCache, } } diff --git a/coderd/cryptokeys/cache.go b/coderd/cryptokeys/cache.go index 74fb025d416fd..7777d5f75b942 100644 --- a/coderd/cryptokeys/cache.go +++ b/coderd/cryptokeys/cache.go @@ -3,6 +3,7 @@ package cryptokeys import ( "context" "encoding/hex" + "fmt" "io" "strconv" "sync" @@ -12,7 +13,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/codersdk" "github.com/coder/quartz" ) @@ -25,7 +26,7 @@ var ( ) type Fetcher interface { - Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) + Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) } type EncryptionKeycache interface { @@ -62,27 +63,26 @@ const ( ) type DBFetcher struct { - DB database.Store - Feature database.CryptoKeyFeature + DB database.Store } -func (d *DBFetcher) Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) { - keys, err := d.DB.GetCryptoKeysByFeature(ctx, d.Feature) +func (d *DBFetcher) Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { + keys, err := d.DB.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeature(feature)) if err != nil { return nil, xerrors.Errorf("get crypto keys by feature: %w", err) } - return db2sdk.CryptoKeys(keys), nil + return toSDKKeys(keys), nil } // cache implements the caching functionality for both signing and encryption keys. type cache struct { - clock quartz.Clock - refreshCtx context.Context - refreshCancel context.CancelFunc - fetcher Fetcher - logger slog.Logger - feature codersdk.CryptoKeyFeature + ctx context.Context + cancel context.CancelFunc + clock quartz.Clock + fetcher Fetcher + logger slog.Logger + feature codersdk.CryptoKeyFeature mu sync.Mutex keys map[int32]codersdk.CryptoKey @@ -109,7 +109,8 @@ func NewSigningCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, if !isSigningKeyFeature(feature) { return nil, xerrors.Errorf("invalid feature: %s", feature) } - return newCache(ctx, logger, fetcher, feature, opts...) 
+ logger = logger.Named(fmt.Sprintf("%s_signing_keycache", feature)) + return newCache(ctx, logger, fetcher, feature, opts...), nil } func NewEncryptionCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, @@ -118,10 +119,11 @@ func NewEncryptionCache(ctx context.Context, logger slog.Logger, fetcher Fetcher if !isEncryptionKeyFeature(feature) { return nil, xerrors.Errorf("invalid feature: %s", feature) } - return newCache(ctx, logger, fetcher, feature, opts...) + logger = logger.Named(fmt.Sprintf("%s_encryption_keycache", feature)) + return newCache(ctx, logger, fetcher, feature, opts...), nil } -func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature codersdk.CryptoKeyFeature, opts ...func(*cache)) (*cache, error) { +func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature codersdk.CryptoKeyFeature, opts ...func(*cache)) *cache { cache := &cache{ clock: quartz.NewReal(), logger: logger, @@ -134,16 +136,16 @@ func newCache(ctx context.Context, logger slog.Logger, fetcher Fetcher, feature } cache.cond = sync.NewCond(&cache.mu) - cache.refreshCtx, cache.refreshCancel = context.WithCancel(ctx) + //nolint:gocritic // We need to be able to read the keys in order to cache them. + cache.ctx, cache.cancel = context.WithCancel(dbauthz.AsKeyReader(ctx)) cache.refresher = cache.clock.AfterFunc(refreshInterval, cache.refresh) - keys, err := cache.cryptoKeys(ctx) + keys, err := cache.cryptoKeys(cache.ctx) if err != nil { - cache.refreshCancel() - return nil, xerrors.Errorf("initial fetch: %w", err) + cache.logger.Critical(cache.ctx, "failed initial fetch", slog.Error(err)) } cache.keys = keys - return cache, nil + return cache } func (c *cache) EncryptingKey(ctx context.Context) (string, interface{}, error) { @@ -151,6 +153,8 @@ func (c *cache) EncryptingKey(ctx context.Context) (string, interface{}, error) return "", nil, ErrInvalidFeature } + //nolint:gocritic // cache can only read crypto keys. + ctx = dbauthz.AsKeyReader(ctx) return c.cryptoKey(ctx, latestSequence) } @@ -164,6 +168,8 @@ func (c *cache) DecryptingKey(ctx context.Context, id string) (interface{}, erro return nil, xerrors.Errorf("parse id: %w", err) } + //nolint:gocritic // cache can only read crypto keys. + ctx = dbauthz.AsKeyReader(ctx) _, secret, err := c.cryptoKey(ctx, int32(seq)) if err != nil { return nil, xerrors.Errorf("crypto key: %w", err) @@ -176,6 +182,8 @@ func (c *cache) SigningKey(ctx context.Context) (string, interface{}, error) { return "", nil, ErrInvalidFeature } + //nolint:gocritic // cache can only read crypto keys. + ctx = dbauthz.AsKeyReader(ctx) return c.cryptoKey(ctx, latestSequence) } @@ -188,7 +196,8 @@ func (c *cache) VerifyingKey(ctx context.Context, id string) (interface{}, error if err != nil { return nil, xerrors.Errorf("parse id: %w", err) } - + //nolint:gocritic // cache can only read crypto keys. 
+ ctx = dbauthz.AsKeyReader(ctx) _, secret, err := c.cryptoKey(ctx, int32(seq)) if err != nil { return nil, xerrors.Errorf("crypto key: %w", err) @@ -198,12 +207,12 @@ func (c *cache) VerifyingKey(ctx context.Context, id string) (interface{}, error } func isEncryptionKeyFeature(feature codersdk.CryptoKeyFeature) bool { - return feature == codersdk.CryptoKeyFeatureWorkspaceApp + return feature == codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey } func isSigningKeyFeature(feature codersdk.CryptoKeyFeature) bool { switch feature { - case codersdk.CryptoKeyFeatureTailnetResume, codersdk.CryptoKeyFeatureOIDCConvert: + case codersdk.CryptoKeyFeatureTailnetResume, codersdk.CryptoKeyFeatureOIDCConvert, codersdk.CryptoKeyFeatureWorkspaceAppsToken: return true default: return false @@ -292,14 +301,15 @@ func checkKey(key codersdk.CryptoKey, sequence int32, now time.Time) (string, [] func (c *cache) refresh() { now := c.clock.Now("CryptoKeyCache", "refresh") c.mu.Lock() - defer c.mu.Unlock() if c.closed { + c.mu.Unlock() return } // If something's already fetching, we don't need to do anything. if c.fetching { + c.mu.Unlock() return } @@ -307,20 +317,21 @@ func (c *cache) refresh() { // is ongoing but prior to the timer getting reset. In this case we want to // avoid double fetching. if now.Sub(c.lastFetch) < refreshInterval { + c.mu.Unlock() return } c.fetching = true c.mu.Unlock() - keys, err := c.cryptoKeys(c.refreshCtx) + keys, err := c.cryptoKeys(c.ctx) if err != nil { - c.logger.Error(c.refreshCtx, "fetch crypto keys", slog.Error(err)) + c.logger.Error(c.ctx, "fetch crypto keys", slog.Error(err)) return } - // We don't defer an unlock here due to the deferred unlock at the top of the function. c.mu.Lock() + defer c.mu.Unlock() c.lastFetch = c.clock.Now() c.refresher.Reset(refreshInterval) @@ -332,9 +343,9 @@ func (c *cache) refresh() { // cryptoKeys queries the control plane for the crypto keys. // Outside of initialization, this should only be called by fetch. 
func (c *cache) cryptoKeys(ctx context.Context) (map[int32]codersdk.CryptoKey, error) { - keys, err := c.fetcher.Fetch(ctx) + keys, err := c.fetcher.Fetch(ctx, c.feature) if err != nil { - return nil, xerrors.Errorf("crypto keys: %w", err) + return nil, xerrors.Errorf("fetch: %w", err) } cache := toKeyMap(keys, c.clock.Now()) return cache, nil @@ -361,9 +372,28 @@ func (c *cache) Close() error { } c.closed = true - c.refreshCancel() + c.cancel() c.refresher.Stop() c.cond.Broadcast() return nil } + +// We have to do this to avoid a circular dependency on db2sdk (cryptokeys -> db2sdk -> tailnet -> cryptokeys) +func toSDKKeys(keys []database.CryptoKey) []codersdk.CryptoKey { + into := make([]codersdk.CryptoKey, 0, len(keys)) + for _, key := range keys { + into = append(into, toSDK(key)) + } + return into +} + +func toSDK(key database.CryptoKey) codersdk.CryptoKey { + return codersdk.CryptoKey{ + Feature: codersdk.CryptoKeyFeature(key.Feature), + Sequence: key.Sequence, + StartsAt: key.StartsAt, + DeletesAt: key.DeletesAt.Time, + Secret: key.Secret.String, + } +} diff --git a/coderd/cryptokeys/cache_test.go b/coderd/cryptokeys/cache_test.go index 92fc4527ae7b3..cda87315605a4 100644 --- a/coderd/cryptokeys/cache_test.go +++ b/coderd/cryptokeys/cache_test.go @@ -488,7 +488,7 @@ type fakeFetcher struct { called int } -func (f *fakeFetcher) Fetch(_ context.Context) ([]codersdk.CryptoKey, error) { +func (f *fakeFetcher) Fetch(_ context.Context, _ codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { f.called++ return f.keys, nil } diff --git a/coderd/cryptokeys/rotate.go b/coderd/cryptokeys/rotate.go index 83e4106584b03..26256b4cd4c12 100644 --- a/coderd/cryptokeys/rotate.go +++ b/coderd/cryptokeys/rotate.go @@ -11,6 +11,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/quartz" ) @@ -53,10 +54,12 @@ func WithKeyDuration(keyDuration time.Duration) RotatorOption { // StartRotator starts a background process that rotates keys in the database. // It ensures there's at least one valid key per feature prior to returning. // Canceling the provided context will stop the background process. -func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, opts ...RotatorOption) error { +func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, opts ...RotatorOption) { + //nolint:gocritic // KeyRotator can only rotate crypto keys. + ctx = dbauthz.AsKeyRotator(ctx) kr := &rotator{ db: db, - logger: logger, + logger: logger.Named("keyrotator"), clock: quartz.NewReal(), keyDuration: DefaultKeyDuration, features: database.AllCryptoKeyFeatureValues(), @@ -68,12 +71,10 @@ func StartRotator(ctx context.Context, logger slog.Logger, db database.Store, op err := kr.rotateKeys(ctx) if err != nil { - return xerrors.Errorf("rotate keys: %w", err) + kr.logger.Critical(ctx, "failed to rotate keys", slog.Error(err)) } go kr.start(ctx) - - return nil } // start begins the process of rotating keys. 
@@ -227,9 +228,11 @@ func (k *rotator) rotateKey(ctx context.Context, tx database.Store, key database func generateNewSecret(feature database.CryptoKeyFeature) (string, error) { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: return generateKey(32) - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureWorkspaceAppsToken: + return generateKey(64) + case database.CryptoKeyFeatureOIDCConvert: return generateKey(64) case database.CryptoKeyFeatureTailnetResume: return generateKey(64) @@ -248,9 +251,11 @@ func generateKey(length int) (string, error) { func tokenDuration(feature database.CryptoKeyFeature) time.Duration { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: + return WorkspaceAppsTokenDuration + case database.CryptoKeyFeatureWorkspaceAppsToken: return WorkspaceAppsTokenDuration - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureOIDCConvert: return OIDCConvertTokenDuration case database.CryptoKeyFeatureTailnetResume: return TailnetResumeTokenDuration diff --git a/coderd/cryptokeys/rotate_internal_test.go b/coderd/cryptokeys/rotate_internal_test.go index 43754c1d8750f..e427a3c6216ac 100644 --- a/coderd/cryptokeys/rotate_internal_test.go +++ b/coderd/cryptokeys/rotate_internal_test.go @@ -38,7 +38,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -46,7 +46,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key. oldKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 15, }) @@ -69,11 +69,11 @@ func Test_rotateKeys(t *testing.T) { // The new key should be created and have a starts_at of the old key's expires_at. newKey, err := db.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: oldKey.Sequence + 1, }) require.NoError(t, err) - requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceApps, oldKey.ExpiresAt(keyDuration), nullTime, oldKey.Sequence+1) + requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, oldKey.ExpiresAt(keyDuration), nullTime, oldKey.Sequence+1) // Advance the clock just before the keys delete time. 
clock.Advance(oldKey.DeletesAt.Time.UTC().Sub(now) - time.Second) @@ -123,7 +123,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -131,7 +131,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key existingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 123, }) @@ -179,7 +179,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -187,7 +187,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key deletingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration), Sequence: 789, DeletesAt: sql.NullTime{ @@ -232,7 +232,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -240,7 +240,7 @@ func Test_rotateKeys(t *testing.T) { // Seed the database with an existing key deletingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 456, DeletesAt: sql.NullTime{ @@ -281,7 +281,7 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } @@ -291,7 +291,7 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) require.Len(t, keys, 1) - requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceApps, clock.Now().UTC(), nullTime, 1) + requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceAppsAPIKey, clock.Now().UTC(), nullTime, 1) }) // Assert we insert a new key when the only key was manually deleted. @@ -312,14 +312,14 @@ func Test_rotateKeys(t *testing.T) { clock: clock, logger: logger, features: []database.CryptoKeyFeature{ - database.CryptoKeyFeatureWorkspaceApps, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, }, } now := dbnow(clock) deletedkey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 19, DeletesAt: sql.NullTime{ @@ -338,7 +338,7 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) require.Len(t, keys, 1) - requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceApps, now, nullTime, deletedkey.Sequence+1) + requireKey(t, keys[0], database.CryptoKeyFeatureWorkspaceAppsAPIKey, now, nullTime, deletedkey.Sequence+1) }) // This tests ensures that rotation works with multiple @@ -365,9 +365,11 @@ func Test_rotateKeys(t *testing.T) { now := dbnow(clock) - // We'll test a scenario where one feature has no valid keys. - // Another has a key that should be rotate. And one that - // has a valid key that shouldn't trigger an action. + // We'll test a scenario where: + // - One feature has no valid keys. 
+ // - One has a key that should be rotated. + // - One has a valid key that shouldn't trigger an action. + // - One has no keys at all. _ = dbgen.CryptoKey(t, db, database.CryptoKey{ Feature: database.CryptoKeyFeatureTailnetResume, StartsAt: now.Add(-keyDuration), @@ -377,6 +379,7 @@ func Test_rotateKeys(t *testing.T) { Valid: false, }, }) + // Generate another deleted key to ensure we insert after the latest sequence. deletedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ Feature: database.CryptoKeyFeatureTailnetResume, StartsAt: now.Add(-keyDuration), @@ -389,14 +392,14 @@ func Test_rotateKeys(t *testing.T) { // Insert a key that should be rotated. rotatedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration + time.Hour), Sequence: 42, }) // Insert a key that should not trigger an action. validKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, + Feature: database.CryptoKeyFeatureOIDCConvert, StartsAt: now, Sequence: 17, }) @@ -406,26 +409,28 @@ func Test_rotateKeys(t *testing.T) { keys, err := db.GetCryptoKeys(ctx) require.NoError(t, err) - require.Len(t, keys, 4) + require.Len(t, keys, 5) kbf, err := keysByFeature(keys, database.AllCryptoKeyFeatureValues()) require.NoError(t, err) // No actions on OIDC convert. - require.Len(t, kbf[database.CryptoKeyFeatureOidcConvert], 1) + require.Len(t, kbf[database.CryptoKeyFeatureOIDCConvert], 1) // Workspace apps should have been rotated. - require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceApps], 2) + require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey], 2) // No existing key for tailnet resume should've // caused a key to be inserted. 
require.Len(t, kbf[database.CryptoKeyFeatureTailnetResume], 1) + require.Len(t, kbf[database.CryptoKeyFeatureWorkspaceAppsToken], 1) - oidcKey := kbf[database.CryptoKeyFeatureOidcConvert][0] + oidcKey := kbf[database.CryptoKeyFeatureOIDCConvert][0] tailnetKey := kbf[database.CryptoKeyFeatureTailnetResume][0] - requireKey(t, oidcKey, database.CryptoKeyFeatureOidcConvert, now, nullTime, validKey.Sequence) + appTokenKey := kbf[database.CryptoKeyFeatureWorkspaceAppsToken][0] + requireKey(t, oidcKey, database.CryptoKeyFeatureOIDCConvert, now, nullTime, validKey.Sequence) requireKey(t, tailnetKey, database.CryptoKeyFeatureTailnetResume, now, nullTime, deletedKey.Sequence+1) - - newKey := kbf[database.CryptoKeyFeatureWorkspaceApps][0] - oldKey := kbf[database.CryptoKeyFeatureWorkspaceApps][1] + requireKey(t, appTokenKey, database.CryptoKeyFeatureWorkspaceAppsToken, now, nullTime, 1) + newKey := kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey][0] + oldKey := kbf[database.CryptoKeyFeatureWorkspaceAppsAPIKey][1] if newKey.Sequence == rotatedKey.Sequence { oldKey, newKey = newKey, oldKey } @@ -433,8 +438,8 @@ func Test_rotateKeys(t *testing.T) { Time: rotatedKey.ExpiresAt(keyDuration).Add(WorkspaceAppsTokenDuration + time.Hour), Valid: true, } - requireKey(t, oldKey, database.CryptoKeyFeatureWorkspaceApps, rotatedKey.StartsAt.UTC(), deletesAt, rotatedKey.Sequence) - requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceApps, rotatedKey.ExpiresAt(keyDuration), nullTime, rotatedKey.Sequence+1) + requireKey(t, oldKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, rotatedKey.StartsAt.UTC(), deletesAt, rotatedKey.Sequence) + requireKey(t, newKey, database.CryptoKeyFeatureWorkspaceAppsAPIKey, rotatedKey.ExpiresAt(keyDuration), nullTime, rotatedKey.Sequence+1) }) t.Run("UnknownFeature", func(t *testing.T) { @@ -478,11 +483,11 @@ func Test_rotateKeys(t *testing.T) { keyDuration: keyDuration, clock: clock, logger: logger, - features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceApps}, + features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceAppsAPIKey}, } expiringKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration), Sequence: 345, }) @@ -522,19 +527,19 @@ func Test_rotateKeys(t *testing.T) { keyDuration: keyDuration, clock: clock, logger: logger, - features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceApps}, + features: []database.CryptoKeyFeature{database.CryptoKeyFeatureWorkspaceAppsAPIKey}, } now := dbnow(clock) expiredKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-keyDuration - 2*time.Hour), Sequence: 19, }) deletedKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now, Sequence: 20, Secret: sql.NullString{ @@ -587,9 +592,11 @@ func requireKey(t *testing.T, key database.CryptoKey, feature database.CryptoKey require.NoError(t, err) switch key.Feature { - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureOIDCConvert: + require.Len(t, secret, 64) + case database.CryptoKeyFeatureWorkspaceAppsToken: require.Len(t, secret, 64) - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: require.Len(t, secret, 32) case 
database.CryptoKeyFeatureTailnetResume: require.Len(t, secret, 64) diff --git a/coderd/cryptokeys/rotate_test.go b/coderd/cryptokeys/rotate_test.go index 190ad213b1153..9e147c8f921f0 100644 --- a/coderd/cryptokeys/rotate_test.go +++ b/coderd/cryptokeys/rotate_test.go @@ -34,8 +34,7 @@ func TestRotator(t *testing.T) { require.NoError(t, err) require.Len(t, dbkeys, 0) - err = cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) - require.NoError(t, err) + cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) // Fetch the keys from the database and ensure they // are as expected. @@ -58,7 +57,7 @@ func TestRotator(t *testing.T) { now := clock.Now().UTC() rotatingKey := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-cryptokeys.DefaultKeyDuration + time.Hour + time.Minute), Sequence: 12345, }) @@ -66,8 +65,7 @@ func TestRotator(t *testing.T) { trap := clock.Trap().TickerFunc() t.Cleanup(trap.Close) - err := cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) - require.NoError(t, err) + cryptokeys.StartRotator(ctx, logger, db, cryptokeys.WithClock(clock)) initialKeyLen := len(database.AllCryptoKeyFeatureValues()) // Fetch the keys from the database and ensure they @@ -85,7 +83,7 @@ func TestRotator(t *testing.T) { require.NoError(t, err) require.Len(t, keys, initialKeyLen+1) - newKey, err := db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + newKey, err := db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.Equal(t, rotatingKey.Sequence+1, newKey.Sequence) require.Equal(t, rotatingKey.ExpiresAt(cryptokeys.DefaultKeyDuration), newKey.StartsAt.UTC()) diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 46ccdd15933e8..ae6b307b3e7d3 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -228,6 +228,42 @@ var ( Scope: rbac.ScopeAll, }.WithCachedASTValue() + // See cryptokeys package. + subjectCryptoKeyRotator = rbac.Subject{ + FriendlyName: "Crypto Key Rotator", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "keyrotator"}, + DisplayName: "Key Rotator", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, + }), + Org: map[string][]rbac.Permission{}, + User: []rbac.Permission{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() + + // See cryptokeys package. + subjectCryptoKeyReader = rbac.Subject{ + FriendlyName: "Crypto Key Reader", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "keyrotator"}, + DisplayName: "Key Rotator", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol}, + }), + Org: map[string][]rbac.Permission{}, + User: []rbac.Permission{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() + subjectSystemRestricted = rbac.Subject{ FriendlyName: "System", ID: uuid.Nil.String(), @@ -281,6 +317,16 @@ func AsHangDetector(ctx context.Context) context.Context { return context.WithValue(ctx, authContextKey{}, subjectHangDetector) } +// AsKeyRotator returns a context with an actor that has permissions required for rotating crypto keys. 
+func AsKeyRotator(ctx context.Context) context.Context { + return context.WithValue(ctx, authContextKey{}, subjectCryptoKeyRotator) +} + +// AsKeyReader returns a context with an actor that has permissions required for reading crypto keys. +func AsKeyReader(ctx context.Context) context.Context { + return context.WithValue(ctx, authContextKey{}, subjectCryptoKeyReader) +} + // AsSystemRestricted returns a context with an actor that has permissions // required for various system operations (login, logout, metrics cache). func AsSystemRestricted(ctx context.Context) context.Context { diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 6a34e88104ce1..439cf1bdaec19 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -2243,13 +2243,13 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("InsertCryptoKey", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertCryptoKeyParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, }). Asserts(rbac.ResourceCryptoKey, policy.ActionCreate) })) s.Run("DeleteCryptoKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.DeleteCryptoKeyParams{ @@ -2259,7 +2259,7 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("GetCryptoKeyByFeatureAndSequence", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.GetCryptoKeyByFeatureAndSequenceParams{ @@ -2269,14 +2269,14 @@ func (s *MethodTestSuite) TestCryptoKeys() { })) s.Run("GetLatestCryptoKeyByFeature", s.Subtest(func(db database.Store, check *expects) { dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) - check.Args(database.CryptoKeyFeatureWorkspaceApps).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) + check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) s.Run("UpdateCryptoKeyDeletesAt", s.Subtest(func(db database.Store, check *expects) { key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4, }) check.Args(database.UpdateCryptoKeyDeletesAtParams{ @@ -2286,7 +2286,7 @@ func (s *MethodTestSuite) TestCryptoKeys() { }).Asserts(rbac.ResourceCryptoKey, policy.ActionUpdate) })) s.Run("GetCryptoKeysByFeature", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.CryptoKeyFeatureWorkspaceApps). + check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey). 
Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) } diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 255c62f82aef2..69419b98c79b1 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -943,7 +943,7 @@ func CustomRole(t testing.TB, db database.Store, seed database.CustomRole) datab func CryptoKey(t testing.TB, db database.Store, seed database.CryptoKey) database.CryptoKey { t.Helper() - seed.Feature = takeFirst(seed.Feature, database.CryptoKeyFeatureWorkspaceApps) + seed.Feature = takeFirst(seed.Feature, database.CryptoKeyFeatureWorkspaceAppsAPIKey) // An empty string for the secret is interpreted as // a caller wanting a new secret to be generated. @@ -1048,9 +1048,11 @@ func takeFirst[Value comparable](values ...Value) Value { func newCryptoKeySecret(feature database.CryptoKeyFeature) (string, error) { switch feature { - case database.CryptoKeyFeatureWorkspaceApps: + case database.CryptoKeyFeatureWorkspaceAppsAPIKey: return generateCryptoKey(32) - case database.CryptoKeyFeatureOidcConvert: + case database.CryptoKeyFeatureWorkspaceAppsToken: + return generateCryptoKey(64) + case database.CryptoKeyFeatureOIDCConvert: return generateCryptoKey(64) case database.CryptoKeyFeatureTailnetResume: return generateCryptoKey(64) diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 3a9a5a7a2d8f6..fc7819e38f218 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -38,7 +38,8 @@ CREATE TYPE build_reason AS ENUM ( ); CREATE TYPE crypto_key_feature AS ENUM ( - 'workspace_apps', + 'workspace_apps_token', + 'workspace_apps_api_key', 'oidc_convert', 'tailnet_resume' ); diff --git a/coderd/database/migrations/000271_cryptokey_features.down.sql b/coderd/database/migrations/000271_cryptokey_features.down.sql new file mode 100644 index 0000000000000..7cdd00d222da8 --- /dev/null +++ b/coderd/database/migrations/000271_cryptokey_features.down.sql @@ -0,0 +1,18 @@ +-- Step 1: Remove the new entries from crypto_keys table +DELETE FROM crypto_keys +WHERE feature IN ('workspace_apps_token', 'workspace_apps_api_key'); + +CREATE TYPE old_crypto_key_feature AS ENUM ( + 'workspace_apps', + 'oidc_convert', + 'tailnet_resume' +); + +ALTER TABLE crypto_keys + ALTER COLUMN feature TYPE old_crypto_key_feature + USING (feature::text::old_crypto_key_feature); + +DROP TYPE crypto_key_feature; + +ALTER TYPE old_crypto_key_feature RENAME TO crypto_key_feature; + diff --git a/coderd/database/migrations/000271_cryptokey_features.up.sql b/coderd/database/migrations/000271_cryptokey_features.up.sql new file mode 100644 index 0000000000000..bca75d220d0c7 --- /dev/null +++ b/coderd/database/migrations/000271_cryptokey_features.up.sql @@ -0,0 +1,18 @@ +-- Create a new enum type with the desired values +CREATE TYPE new_crypto_key_feature AS ENUM ( + 'workspace_apps_token', + 'workspace_apps_api_key', + 'oidc_convert', + 'tailnet_resume' +); + +DELETE FROM crypto_keys WHERE feature = 'workspace_apps'; + +-- Drop the old type and rename the new one +ALTER TABLE crypto_keys + ALTER COLUMN feature TYPE new_crypto_key_feature + USING (feature::text::new_crypto_key_feature); + +DROP TYPE crypto_key_feature; + +ALTER TYPE new_crypto_key_feature RENAME TO crypto_key_feature; diff --git a/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql b/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql new file mode 100644 index 0000000000000..5cb2cd4c95509 --- /dev/null +++ 
b/coderd/database/migrations/testdata/fixtures/000271_cryptokey_features.up.sql @@ -0,0 +1,40 @@ +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'workspace_apps_token', + 1, + 'abc', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'workspace_apps_api_key', + 1, + 'def', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'oidc_convert', + 2, + 'ghi', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + +INSERT INTO crypto_keys (feature, sequence, secret, secret_key_id, starts_at, deletes_at) +VALUES ( + 'tailnet_resume', + 2, + 'jkl', + NULL, + '1970-01-01 00:00:00 UTC'::timestamptz, + '2100-01-01 00:00:00 UTC'::timestamptz +); + diff --git a/coderd/database/models.go b/coderd/database/models.go index 1207587d46529..e7d90acf5ea94 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -345,9 +345,10 @@ func AllBuildReasonValues() []BuildReason { type CryptoKeyFeature string const ( - CryptoKeyFeatureWorkspaceApps CryptoKeyFeature = "workspace_apps" - CryptoKeyFeatureOidcConvert CryptoKeyFeature = "oidc_convert" - CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" + CryptoKeyFeatureWorkspaceAppsToken CryptoKeyFeature = "workspace_apps_token" + CryptoKeyFeatureWorkspaceAppsAPIKey CryptoKeyFeature = "workspace_apps_api_key" + CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" + CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" ) func (e *CryptoKeyFeature) Scan(src interface{}) error { @@ -387,8 +388,9 @@ func (ns NullCryptoKeyFeature) Value() (driver.Value, error) { func (e CryptoKeyFeature) Valid() bool { switch e { - case CryptoKeyFeatureWorkspaceApps, - CryptoKeyFeatureOidcConvert, + case CryptoKeyFeatureWorkspaceAppsToken, + CryptoKeyFeatureWorkspaceAppsAPIKey, + CryptoKeyFeatureOIDCConvert, CryptoKeyFeatureTailnetResume: return true } @@ -397,8 +399,9 @@ func (e CryptoKeyFeature) Valid() bool { func AllCryptoKeyFeatureValues() []CryptoKeyFeature { return []CryptoKeyFeature{ - CryptoKeyFeatureWorkspaceApps, - CryptoKeyFeatureOidcConvert, + CryptoKeyFeatureWorkspaceAppsToken, + CryptoKeyFeatureWorkspaceAppsAPIKey, + CryptoKeyFeatureOIDCConvert, CryptoKeyFeatureTailnetResume, } } diff --git a/coderd/database/sqlc.yaml b/coderd/database/sqlc.yaml index a70e45a522989..257c95ddb2d7a 100644 --- a/coderd/database/sqlc.yaml +++ b/coderd/database/sqlc.yaml @@ -135,6 +135,8 @@ sql: api_key_id: APIKeyID callback_url: CallbackURL login_type_oauth2_provider_app: LoginTypeOAuth2ProviderApp + crypto_key_feature_workspace_apps_api_key: CryptoKeyFeatureWorkspaceAppsAPIKey + crypto_key_feature_oidc_convert: CryptoKeyFeatureOIDCConvert rules: - name: do-not-use-public-schema-in-queries message: "do not use public schema in queries" diff --git a/coderd/jwtutils/jwe.go b/coderd/jwtutils/jwe.go index d03816a477a26..bc9d0ddd2a9c8 100644 --- a/coderd/jwtutils/jwe.go +++ b/coderd/jwtutils/jwe.go @@ -65,6 +65,12 @@ func Encrypt(ctx context.Context, e EncryptKeyProvider, claims Claims) (string, return compact, nil } +func WithDecryptExpected(expected jwt.Expected) func(*DecryptOptions) { + return func(opts *DecryptOptions) { + opts.RegisteredClaims = expected + } 
+} + // DecryptOptions are options for decrypting a JWE. type DecryptOptions struct { RegisteredClaims jwt.Expected @@ -100,7 +106,7 @@ func Decrypt(ctx context.Context, d DecryptKeyProvider, token string, claims Cla kid := object.Header.KeyID if kid == "" { - return xerrors.Errorf("expected %q header to be a string", keyIDHeaderKey) + return ErrMissingKeyID } key, err := d.DecryptingKey(ctx, kid) diff --git a/coderd/jwtutils/jws.go b/coderd/jwtutils/jws.go index 73f35e672492d..0c8ca9aa30f39 100644 --- a/coderd/jwtutils/jws.go +++ b/coderd/jwtutils/jws.go @@ -10,10 +10,27 @@ import ( "golang.org/x/xerrors" ) +var ErrMissingKeyID = xerrors.New("missing key ID") + const ( keyIDHeaderKey = "kid" ) +// RegisteredClaims is a convenience type for embedding jwt.Claims. It should be +// preferred over embedding jwt.Claims directly since it will ensure that certain fields are set. +type RegisteredClaims jwt.Claims + +func (r RegisteredClaims) Validate(e jwt.Expected) error { + if r.Expiry == nil { + return xerrors.Errorf("expiry is required") + } + if e.Time.IsZero() { + return xerrors.Errorf("expected time is required") + } + + return (jwt.Claims(r)).Validate(e) +} + // Claims defines the payload for a JWT. Most callers // should embed jwt.Claims type Claims interface { @@ -24,6 +41,11 @@ const ( signingAlgo = jose.HS512 ) +type SigningKeyManager interface { + SigningKeyProvider + VerifyKeyProvider +} + type SigningKeyProvider interface { SigningKey(ctx context.Context) (id string, key interface{}, err error) } @@ -75,6 +97,12 @@ type VerifyOptions struct { SignatureAlgorithm jose.SignatureAlgorithm } +func WithVerifyExpected(expected jwt.Expected) func(*VerifyOptions) { + return func(opts *VerifyOptions) { + opts.RegisteredClaims = expected + } +} + // Verify verifies that a token was signed by the provided key. It unmarshals into the provided claims. func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claims, opts ...func(*VerifyOptions)) error { options := VerifyOptions{ @@ -105,7 +133,7 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim kid := signature.Header.KeyID if kid == "" { - return xerrors.Errorf("expected %q header to be a string", keyIDHeaderKey) + return ErrMissingKeyID } key, err := v.VerifyingKey(ctx, kid) @@ -125,3 +153,35 @@ func Verify(ctx context.Context, v VerifyKeyProvider, token string, claims Claim return claims.Validate(options.RegisteredClaims) } + +// StaticKey fulfills the SigningKeycache and EncryptionKeycache interfaces. Useful for testing. 
+type StaticKey struct { + ID string + Key interface{} +} + +func (s StaticKey) SigningKey(_ context.Context) (string, interface{}, error) { + return s.ID, s.Key, nil +} + +func (s StaticKey) VerifyingKey(_ context.Context, id string) (interface{}, error) { + if id != s.ID { + return nil, xerrors.Errorf("invalid id %q", id) + } + return s.Key, nil +} + +func (s StaticKey) EncryptingKey(_ context.Context) (string, interface{}, error) { + return s.ID, s.Key, nil +} + +func (s StaticKey) DecryptingKey(_ context.Context, id string) (interface{}, error) { + if id != s.ID { + return nil, xerrors.Errorf("invalid id %q", id) + } + return s.Key, nil +} + +func (StaticKey) Close() error { + return nil +} diff --git a/coderd/jwtutils/jwt_test.go b/coderd/jwtutils/jwt_test.go index 697e5d210d858..5d1f4d48bdb4a 100644 --- a/coderd/jwtutils/jwt_test.go +++ b/coderd/jwtutils/jwt_test.go @@ -236,11 +236,11 @@ func TestJWS(t *testing.T) { ctx = testutil.Context(t, testutil.WaitShort) db, _ = dbtestutil.NewDB(t) _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, + Feature: database.CryptoKeyFeatureOIDCConvert, StartsAt: time.Now(), }) log = slogtest.Make(t, nil) - fetcher = &cryptokeys.DBFetcher{DB: db, Feature: database.CryptoKeyFeatureOidcConvert} + fetcher = &cryptokeys.DBFetcher{DB: db} ) cache, err := cryptokeys.NewSigningCache(ctx, log, fetcher, codersdk.CryptoKeyFeatureOIDCConvert) @@ -326,15 +326,15 @@ func TestJWE(t *testing.T) { ctx = testutil.Context(t, testutil.WaitShort) db, _ = dbtestutil.NewDB(t) _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: time.Now(), }) log = slogtest.Make(t, nil) - fetcher = &cryptokeys.DBFetcher{DB: db, Feature: database.CryptoKeyFeatureWorkspaceApps} + fetcher = &cryptokeys.DBFetcher{DB: db} ) - cache, err := cryptokeys.NewEncryptionCache(ctx, log, fetcher, codersdk.CryptoKeyFeatureWorkspaceApp) + cache, err := cryptokeys.NewEncryptionCache(ctx, log, fetcher, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) claims := testClaims{ diff --git a/coderd/userauth.go b/coderd/userauth.go index 85ab0d77e6cc1..f1a19d77d23d0 100644 --- a/coderd/userauth.go +++ b/coderd/userauth.go @@ -15,7 +15,8 @@ import ( "time" "github.com/coreos/go-oidc/v3/oidc" - "github.com/golang-jwt/jwt/v4" + "github.com/go-jose/go-jose/v4" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/go-github/v43/github" "github.com/google/uuid" "github.com/moby/moby/pkg/namesgenerator" @@ -23,6 +24,9 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/idpsync" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/audit" @@ -32,7 +36,6 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/idpsync" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/coderd/rbac" @@ -49,7 +52,7 @@ const ( ) type OAuthConvertStateClaims struct { - jwt.RegisteredClaims + jwtutils.RegisteredClaims UserID uuid.UUID `json:"user_id"` State string `json:"state"` @@ -57,6 +60,10 @@ type OAuthConvertStateClaims struct { ToLoginType codersdk.LoginType `json:"to_login_type"` } +func (o *OAuthConvertStateClaims) 
Validate(e jwt.Expected) error { + return o.RegisteredClaims.Validate(e) +} + // postConvertLoginType replies with an oauth state token capable of converting // the user to an oauth user. // @@ -149,11 +156,11 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { // Eg: Developers with more than 1 deployment. now := time.Now() claims := &OAuthConvertStateClaims{ - RegisteredClaims: jwt.RegisteredClaims{ + RegisteredClaims: jwtutils.RegisteredClaims{ Issuer: api.DeploymentID, Subject: stateString, Audience: []string{user.ID.String()}, - ExpiresAt: jwt.NewNumericDate(now.Add(time.Minute * 5)), + Expiry: jwt.NewNumericDate(now.Add(time.Minute * 5)), NotBefore: jwt.NewNumericDate(now.Add(time.Second * -1)), IssuedAt: jwt.NewNumericDate(now), ID: uuid.NewString(), @@ -164,9 +171,7 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { ToLoginType: req.ToType, } - token := jwt.NewWithClaims(jwt.SigningMethodHS512, claims) - // Key must be a byte slice, not an array. So make sure to include the [:] - tokenString, err := token.SignedString(api.OAuthSigningKey[:]) + token, err := jwtutils.Sign(ctx, api.OIDCConvertKeyCache, claims) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error signing state jwt.", @@ -176,8 +181,8 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { } aReq.New = database.AuditOAuthConvertState{ - CreatedAt: claims.IssuedAt.Time, - ExpiresAt: claims.ExpiresAt.Time, + CreatedAt: claims.IssuedAt.Time(), + ExpiresAt: claims.Expiry.Time(), FromLoginType: database.LoginType(claims.FromLoginType), ToLoginType: database.LoginType(claims.ToLoginType), UserID: claims.UserID, @@ -186,8 +191,8 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { http.SetCookie(rw, &http.Cookie{ Name: OAuthConvertCookieValue, Path: "/", - Value: tokenString, - Expires: claims.ExpiresAt.Time, + Value: token, + Expires: claims.Expiry.Time(), Secure: api.SecureAuthCookie, HttpOnly: true, // Must be SameSite to work on the redirected auth flow from the @@ -196,7 +201,7 @@ func (api *API) postConvertLoginType(rw http.ResponseWriter, r *http.Request) { }) httpapi.Write(ctx, rw, http.StatusCreated, codersdk.OAuthConversionResponse{ StateString: stateString, - ExpiresAt: claims.ExpiresAt.Time, + ExpiresAt: claims.Expiry.Time(), ToType: claims.ToLoginType, UserID: claims.UserID, }) @@ -1677,10 +1682,9 @@ func (api *API) convertUserToOauth(ctx context.Context, r *http.Request, db data } } var claims OAuthConvertStateClaims - token, err := jwt.ParseWithClaims(jwtCookie.Value, &claims, func(_ *jwt.Token) (interface{}, error) { - return api.OAuthSigningKey[:], nil - }) - if xerrors.Is(err, jwt.ErrSignatureInvalid) || !token.Valid { + + err = jwtutils.Verify(ctx, api.OIDCConvertKeyCache, jwtCookie.Value, &claims) + if xerrors.Is(err, cryptokeys.ErrKeyNotFound) || xerrors.Is(err, cryptokeys.ErrKeyInvalid) || xerrors.Is(err, jose.ErrCryptoFailure) || xerrors.Is(err, jwtutils.ErrMissingKeyID) { // These errors are probably because the user is mixing 2 coder deployments. 
return database.User{}, idpsync.HTTPError{ Code: http.StatusBadRequest, @@ -1709,7 +1713,7 @@ func (api *API) convertUserToOauth(ctx context.Context, r *http.Request, db data oauthConvertAudit.UserID = claims.UserID oauthConvertAudit.Old = user - if claims.RegisteredClaims.Issuer != api.DeploymentID { + if claims.Issuer != api.DeploymentID { return database.User{}, idpsync.HTTPError{ Code: http.StatusForbidden, Msg: "Request to convert login type failed. Issuer mismatch. Found a cookie from another coder deployment, please try again.", diff --git a/coderd/userauth_test.go b/coderd/userauth_test.go index 20dfe7f723899..6386be7eb8be4 100644 --- a/coderd/userauth_test.go +++ b/coderd/userauth_test.go @@ -3,6 +3,8 @@ package coderd_test import ( "context" "crypto" + "crypto/rand" + "encoding/json" "fmt" "io" "net/http" @@ -13,6 +15,7 @@ import ( "time" "github.com/coreos/go-oidc/v3/oidc" + "github.com/go-jose/go-jose/v4" "github.com/golang-jwt/jwt/v4" "github.com/google/go-github/v43/github" "github.com/google/uuid" @@ -27,10 +30,12 @@ import ( "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/coderdtest/oidctest" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/codersdk" @@ -1316,6 +1321,7 @@ func TestUserOIDC(t *testing.T) { owner := coderdtest.CreateFirstUser(t, client) user, userData := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + require.Equal(t, codersdk.LoginTypePassword, userData.LoginType) claims := jwt.MapClaims{ "email": userData.Email, @@ -1323,15 +1329,17 @@ func TestUserOIDC(t *testing.T) { var err error user.HTTPClient.Jar, err = cookiejar.New(nil) require.NoError(t, err) + user.HTTPClient.Transport = http.DefaultTransport.(*http.Transport).Clone() ctx := testutil.Context(t, testutil.WaitShort) + convertResponse, err := user.ConvertLoginType(ctx, codersdk.ConvertLoginRequest{ ToType: codersdk.LoginTypeOIDC, Password: "SomeSecurePassword!", }) require.NoError(t, err) - fake.LoginWithClient(t, user, claims, func(r *http.Request) { + _, _ = fake.LoginWithClient(t, user, claims, func(r *http.Request) { r.URL.RawQuery = url.Values{ "oidc_merge_state": {convertResponse.StateString}, }.Encode() @@ -1341,6 +1349,99 @@ func TestUserOIDC(t *testing.T) { r.AddCookie(cookie) } }) + + info, err := client.User(ctx, userData.ID.String()) + require.NoError(t, err) + require.Equal(t, codersdk.LoginTypeOIDC, info.LoginType) + }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitMedium) + logger = slogtest.Make(t, nil) + ) + + auditor := audit.NewMock() + fake := oidctest.NewFakeIDP(t, + oidctest.WithRefresh(func(_ string) error { + return xerrors.New("refreshing token should never occur") + }), + oidctest.WithServing(), + ) + cfg := fake.OIDCConfig(t, nil, func(cfg *coderd.OIDCConfig) { + cfg.AllowSignups = true + }) + + db, ps := dbtestutil.NewDB(t) + fetcher := &cryptokeys.DBFetcher{ + DB: db, + } + + kc, err := cryptokeys.NewSigningCache(ctx, logger, fetcher, codersdk.CryptoKeyFeatureOIDCConvert) + require.NoError(t, err) + + client := coderdtest.New(t, &coderdtest.Options{ + Auditor: 
auditor, + OIDCConfig: cfg, + Database: db, + Pubsub: ps, + OIDCConvertKeyCache: kc, + }) + + owner := coderdtest.CreateFirstUser(t, client) + user, userData := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + claims := jwt.MapClaims{ + "email": userData.Email, + } + user.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + user.HTTPClient.Transport = http.DefaultTransport.(*http.Transport).Clone() + + convertResponse, err := user.ConvertLoginType(ctx, codersdk.ConvertLoginRequest{ + ToType: codersdk.LoginTypeOIDC, + Password: "SomeSecurePassword!", + }) + require.NoError(t, err) + + // Update the cookie to use a bad signing key. We're asserting the behavior of the scenario + // where a JWT gets minted on an old version of Coder but gets verified on a new version. + _, resp := fake.AttemptLogin(t, user, claims, func(r *http.Request) { + r.URL.RawQuery = url.Values{ + "oidc_merge_state": {convertResponse.StateString}, + }.Encode() + r.Header.Set(codersdk.SessionTokenHeader, user.SessionToken()) + + cookies := user.HTTPClient.Jar.Cookies(user.URL) + for i, cookie := range cookies { + if cookie.Name != coderd.OAuthConvertCookieValue { + continue + } + + jwt := cookie.Value + var claims coderd.OAuthConvertStateClaims + err := jwtutils.Verify(ctx, kc, jwt, &claims) + require.NoError(t, err) + badJWT := generateBadJWT(t, claims) + cookie.Value = badJWT + cookies[i] = cookie + } + + user.HTTPClient.Jar.SetCookies(user.URL, cookies) + + for _, cookie := range cookies { + fmt.Printf("cookie: %+v\n", cookie) + r.AddCookie(cookie) + } + }) + defer resp.Body.Close() + require.Equal(t, http.StatusBadRequest, resp.StatusCode) + var respErr codersdk.Response + err = json.NewDecoder(resp.Body).Decode(&respErr) + require.NoError(t, err) + require.Contains(t, respErr.Message, "Using an invalid jwt to authorize this action.") }) t.Run("AlternateUsername", func(t *testing.T) { @@ -2022,3 +2123,24 @@ func inflateClaims(t testing.TB, seed jwt.MapClaims, size int) jwt.MapClaims { seed["random_data"] = junk return seed } + +// generateBadJWT generates a JWT with a random key. It's intended to emulate the old-style JWT's we generated. 
+func generateBadJWT(t *testing.T, claims interface{}) string { + t.Helper() + + var buf [64]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + signer, err := jose.NewSigner(jose.SigningKey{ + Algorithm: jose.HS512, + Key: buf[:], + }, nil) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := signer.Sign(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 6ea631f2e7d0c..a181697f27279 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -32,6 +32,7 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -852,8 +853,12 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R ) if resumeToken != "" { var err error - peerID, err = api.Options.CoordinatorResumeTokenProvider.VerifyResumeToken(resumeToken) - if err != nil { + peerID, err = api.Options.CoordinatorResumeTokenProvider.VerifyResumeToken(ctx, resumeToken) + // If the token is missing the key ID, it's probably an old token in which + // case we just want to generate a new peer ID. + if xerrors.Is(err, jwtutils.ErrMissingKeyID) { + peerID = uuid.New() + } else if err != nil { httpapi.Write(ctx, rw, http.StatusUnauthorized, codersdk.Response{ Message: workspacesdk.CoordinateAPIInvalidResumeToken, Detail: err.Error(), @@ -862,9 +867,10 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R }, }) return + } else { + api.Logger.Debug(ctx, "accepted coordinate resume token for peer", + slog.F("peer_id", peerID.String())) } - api.Logger.Debug(ctx, "accepted coordinate resume token for peer", - slog.F("peer_id", peerID.String())) } api.WebsocketWaitMutex.Lock() diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 8c0801a914d61..ba677975471d6 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -13,6 +13,7 @@ import ( "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -36,6 +37,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/externalauth" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/workspacesdk" @@ -531,20 +533,20 @@ func newResumeTokenRecordingProvider(t testing.TB, underlying tailnet.ResumeToke } } -func (r *resumeTokenRecordingProvider) GenerateResumeToken(peerID uuid.UUID) (*tailnetproto.RefreshResumeTokenResponse, error) { +func (r *resumeTokenRecordingProvider) GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*tailnetproto.RefreshResumeTokenResponse, error) { select { case r.generateCalls <- peerID: - return r.ResumeTokenProvider.GenerateResumeToken(peerID) + return r.ResumeTokenProvider.GenerateResumeToken(ctx, peerID) default: r.t.Error("generateCalls full") return nil, xerrors.New("generateCalls full") } } -func (r *resumeTokenRecordingProvider) VerifyResumeToken(token string) (uuid.UUID, 
error) { +func (r *resumeTokenRecordingProvider) VerifyResumeToken(ctx context.Context, token string) (uuid.UUID, error) { select { case r.verifyCalls <- token: - return r.ResumeTokenProvider.VerifyResumeToken(token) + return r.ResumeTokenProvider.VerifyResumeToken(ctx, token) default: r.t.Error("verifyCalls full") return uuid.Nil, xerrors.New("verifyCalls full") @@ -554,69 +556,136 @@ func (r *resumeTokenRecordingProvider) VerifyResumeToken(token string) (uuid.UUI func TestWorkspaceAgentClientCoordinate_ResumeToken(t *testing.T) { t.Parallel() - logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) - clock := quartz.NewMock(t) - resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() - require.NoError(t, err) - resumeTokenProvider := newResumeTokenRecordingProvider( - t, - tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour), - ) - client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ - Coordinator: tailnet.NewCoordinator(logger), - CoordinatorResumeTokenProvider: resumeTokenProvider, + t.Run("OK", func(t *testing.T) { + t.Parallel() + + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + clock := quartz.NewMock(t) + resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + require.NoError(t, err) + resumeTokenProvider := newResumeTokenRecordingProvider( + t, + tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour), + ) + client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + Coordinator: tailnet.NewCoordinator(logger), + CoordinatorResumeTokenProvider: resumeTokenProvider, + }) + defer closer.Close() + user := coderdtest.CreateFirstUser(t, client) + + // Create a workspace with an agent. No need to connect it since clients can + // still connect to the coordinator while the agent isn't connected. + r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent().Do() + agentTokenUUID, err := uuid.Parse(r.AgentToken) + require.NoError(t, err) + ctx := testutil.Context(t, testutil.WaitLong) + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + require.NoError(t, err) + + // Connect with no resume token, and ensure that the peer ID is set to a + // random value. + originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") + require.NoError(t, err) + originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, uuid.Nil) + + // Connect with a valid resume token, and ensure that the peer ID is set to + // the stored value. + clock.Advance(time.Second) + newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, originalResumeToken) + require.NoError(t, err) + verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, originalResumeToken, verifiedToken) + newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.Equal(t, originalPeerID, newPeerID) + require.NotEqual(t, originalResumeToken, newResumeToken) + + // Connect with an invalid resume token, and ensure that the request is + // rejected. 
+ clock.Advance(time.Second) + _, err = connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "invalid") + require.Error(t, err) + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusUnauthorized, sdkErr.StatusCode()) + require.Len(t, sdkErr.Validations, 1) + require.Equal(t, "resume_token", sdkErr.Validations[0].Field) + verifiedToken = testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, "invalid", verifiedToken) + + select { + case <-resumeTokenProvider.generateCalls: + t.Fatal("unexpected peer ID in channel") + default: + } }) - defer closer.Close() - user := coderdtest.CreateFirstUser(t, client) - // Create a workspace with an agent. No need to connect it since clients can - // still connect to the coordinator while the agent isn't connected. - r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ - OrganizationID: user.OrganizationID, - OwnerID: user.UserID, - }).WithAgent().Do() - agentTokenUUID, err := uuid.Parse(r.AgentToken) - require.NoError(t, err) - ctx := testutil.Context(t, testutil.WaitLong) - agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint - require.NoError(t, err) + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() - // Connect with no resume token, and ensure that the peer ID is set to a - // random value. - originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") - require.NoError(t, err) - originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) - require.NotEqual(t, originalPeerID, uuid.Nil) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + clock := quartz.NewMock(t) + resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + require.NoError(t, err) + resumeTokenProvider := newResumeTokenRecordingProvider( + t, + tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour), + ) + client, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + Coordinator: tailnet.NewCoordinator(logger), + CoordinatorResumeTokenProvider: resumeTokenProvider, + }) + defer closer.Close() + user := coderdtest.CreateFirstUser(t, client) - // Connect with a valid resume token, and ensure that the peer ID is set to - // the stored value. - clock.Advance(time.Second) - newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, originalResumeToken) - require.NoError(t, err) - verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) - require.Equal(t, originalResumeToken, verifiedToken) - newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) - require.Equal(t, originalPeerID, newPeerID) - require.NotEqual(t, originalResumeToken, newResumeToken) - - // Connect with an invalid resume token, and ensure that the request is - // rejected. 
- clock.Advance(time.Second) - _, err = connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "invalid") - require.Error(t, err) - var sdkErr *codersdk.Error - require.ErrorAs(t, err, &sdkErr) - require.Equal(t, http.StatusUnauthorized, sdkErr.StatusCode()) - require.Len(t, sdkErr.Validations, 1) - require.Equal(t, "resume_token", sdkErr.Validations[0].Field) - verifiedToken = testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) - require.Equal(t, "invalid", verifiedToken) + // Create a workspace with an agent. No need to connect it since clients can + // still connect to the coordinator while the agent isn't connected. + r := dbfake.WorkspaceBuild(t, api.Database, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent().Do() + agentTokenUUID, err := uuid.Parse(r.AgentToken) + require.NoError(t, err) + ctx := testutil.Context(t, testutil.WaitLong) + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + require.NoError(t, err) - select { - case <-resumeTokenProvider.generateCalls: - t.Fatal("unexpected peer ID in channel") - default: - } + // Connect with no resume token, and ensure that the peer ID is set to a + // random value. + originalResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, "") + require.NoError(t, err) + originalPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, uuid.Nil) + + // Connect with an outdated token, and ensure that the peer ID is set to a + // random value. We don't want to fail requests just because + // a user got unlucky during a deployment upgrade. + outdatedToken := generateBadJWT(t, jwtutils.RegisteredClaims{ + Subject: originalPeerID.String(), + Expiry: jwt.NewNumericDate(clock.Now().Add(time.Minute)), + }) + + clock.Advance(time.Second) + newResumeToken, err := connectToCoordinatorAndFetchResumeToken(ctx, logger, client, agentAndBuild.WorkspaceAgent.ID, outdatedToken) + require.NoError(t, err) + verifiedToken := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.verifyCalls) + require.Equal(t, outdatedToken, verifiedToken) + newPeerID := testutil.RequireRecvCtx(ctx, t, resumeTokenProvider.generateCalls) + require.NotEqual(t, originalPeerID, newPeerID) + require.NotEqual(t, originalResumeToken, newResumeToken) + }) } // connectToCoordinatorAndFetchResumeToken connects to the tailnet coordinator diff --git a/coderd/workspaceapps.go b/coderd/workspaceapps.go index d2fa11b9ea2ea..e264dbd80b58d 100644 --- a/coderd/workspaceapps.go +++ b/coderd/workspaceapps.go @@ -16,6 +16,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" @@ -122,10 +123,11 @@ func (api *API) workspaceApplicationAuth(rw http.ResponseWriter, r *http.Request return } - // Encrypt the API key. 
- encryptedAPIKey, err := api.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ + payload := workspaceapps.EncryptedAPIKeyPayload{ APIKey: cookie.Value, - }) + } + payload.Fill(api.Clock.Now()) + encryptedAPIKey, err := jwtutils.Encrypt(ctx, api.AppEncryptionKeyCache, payload) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Failed to encrypt API key.", diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go index 14adf2d61d362..c6e251806230d 100644 --- a/coderd/workspaceapps/apptest/apptest.go +++ b/coderd/workspaceapps/apptest/apptest.go @@ -3,6 +3,7 @@ package apptest import ( "bufio" "context" + "crypto/rand" "encoding/json" "fmt" "io" @@ -408,6 +409,67 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { require.Equal(t, http.StatusInternalServerError, resp.StatusCode) assertWorkspaceLastUsedAtNotUpdated(t, appDetails) }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + appDetails := setupProxyTest(t, nil) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + u := appDetails.PathAppURL(appDetails.Apps.Owner) + resp, err := requestWithRetries(ctx, t, appDetails.AppClient(t), http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + + appTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotNil(t, appTokenCookie, "no signed app token cookie in response") + require.Equal(t, appTokenCookie.Path, u.Path, "incorrect path on app token cookie") + + object, err := jose.ParseSigned(appTokenCookie.Value) + require.NoError(t, err) + require.Len(t, object.Signatures, 1) + + // Parse the payload. + var tok workspaceapps.SignedToken + //nolint:gosec + err = json.Unmarshal(object.UnsafePayloadWithoutVerification(), &tok) + require.NoError(t, err) + + appTokenClient := appDetails.AppClient(t) + apiKey := appTokenClient.SessionToken() + appTokenClient.SetSessionToken("") + appTokenClient.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + // Sign the token with an old-style key. + appTokenCookie.Value = generateBadJWT(t, tok) + appTokenClient.HTTPClient.Jar.SetCookies(u, + []*http.Cookie{ + appTokenCookie, + { + Name: codersdk.PathAppSessionTokenCookie, + Value: apiKey, + }, + }, + ) + + resp, err = requestWithRetries(ctx, t, appTokenClient, http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + assertWorkspaceLastUsedAtUpdated(t, appDetails) + + // Since the old token is invalid, the signed app token cookie should have a new value. + newTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotEqual(t, appTokenCookie.Value, newTokenCookie.Value) + }) }) t.Run("WorkspaceApplicationAuth", func(t *testing.T) { @@ -463,7 +525,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { appClient.SetSessionToken("") // Try to load the application without authentication. 
- u := c.appURL + u := *c.appURL u.Path = path.Join(u.Path, "/test") req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) require.NoError(t, err) @@ -500,7 +562,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { // Copy the query parameters and then check equality. u.RawQuery = gotLocation.RawQuery - require.Equal(t, u, gotLocation) + require.Equal(t, u, *gotLocation) // Verify the API key is set. encryptedAPIKey := gotLocation.Query().Get(workspaceapps.SubdomainProxyAPIKeyParam) @@ -580,6 +642,38 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { resp.Body.Close() require.Equal(t, http.StatusOK, resp.StatusCode) }) + + t.Run("BadJWE", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + currentKeyStr := appDetails.SDKClient.SessionToken() + appClient := appDetails.AppClient(t) + appClient.SetSessionToken("") + u := *c.appURL + u.Path = path.Join(u.Path, "/test") + badToken := generateBadJWE(t, workspaceapps.EncryptedAPIKeyPayload{ + APIKey: currentKeyStr, + }) + + u.RawQuery = (url.Values{ + workspaceapps.SubdomainProxyAPIKeyParam: {badToken}, + }).Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + require.NoError(t, err) + + var resp *http.Response + resp, err = doWithRetries(t, appClient, req) + require.NoError(t, err) + defer resp.Body.Close() + require.Equal(t, http.StatusBadRequest, resp.StatusCode) + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Contains(t, string(body), "Could not decrypt API key. Please remove the query parameter and try again.") + }) } }) }) @@ -1077,6 +1171,68 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { assertWorkspaceLastUsedAtNotUpdated(t, appDetails) }) }) + + t.Run("BadJWT", func(t *testing.T) { + t.Parallel() + + appDetails := setupProxyTest(t, nil) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + u := appDetails.SubdomainAppURL(appDetails.Apps.Owner) + resp, err := requestWithRetries(ctx, t, appDetails.AppClient(t), http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + + appTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotNil(t, appTokenCookie, "no signed token cookie in response") + require.Equal(t, appTokenCookie.Path, "/", "incorrect path on signed token cookie") + + object, err := jose.ParseSigned(appTokenCookie.Value) + require.NoError(t, err) + require.Len(t, object.Signatures, 1) + + // Parse the payload. + var tok workspaceapps.SignedToken + //nolint:gosec + err = json.Unmarshal(object.UnsafePayloadWithoutVerification(), &tok) + require.NoError(t, err) + + appTokenClient := appDetails.AppClient(t) + apiKey := appTokenClient.SessionToken() + appTokenClient.SetSessionToken("") + appTokenClient.HTTPClient.Jar, err = cookiejar.New(nil) + require.NoError(t, err) + // Sign the token with an old-style key. + appTokenCookie.Value = generateBadJWT(t, tok) + appTokenClient.HTTPClient.Jar.SetCookies(u, + []*http.Cookie{ + appTokenCookie, + { + Name: codersdk.SubdomainAppSessionTokenCookie, + Value: apiKey, + }, + }, + ) + + // We should still be able to successfully proxy. 
+ resp, err = requestWithRetries(ctx, t, appTokenClient, http.MethodGet, u.String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + body, err = io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, proxyTestAppBody, string(body)) + require.Equal(t, http.StatusOK, resp.StatusCode) + assertWorkspaceLastUsedAtUpdated(t, appDetails) + + // Since the old token is invalid, the signed app token cookie should have a new value. + newTokenCookie := findCookie(resp.Cookies(), codersdk.SignedAppTokenCookie) + require.NotEqual(t, appTokenCookie.Value, newTokenCookie.Value) + }) }) t.Run("PortSharing", func(t *testing.T) { @@ -1789,3 +1945,57 @@ func assertWorkspaceLastUsedAtNotUpdated(t testing.TB, details *Details) { require.NoError(t, err) require.Equal(t, before.LastUsedAt, after.LastUsedAt, "workspace LastUsedAt updated when it should not have been") } + +func generateBadJWE(t *testing.T, claims interface{}) string { + t.Helper() + var buf [32]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + encrypt, err := jose.NewEncrypter( + jose.A256GCM, + jose.Recipient{ + Algorithm: jose.A256GCMKW, + Key: buf[:], + }, &jose.EncrypterOptions{ + Compression: jose.DEFLATE, + }, + ) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := encrypt.Encrypt(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} + +// generateBadJWT generates a JWT with a random key. It's intended to emulate the old-style JWT's we generated. +func generateBadJWT(t *testing.T, claims interface{}) string { + t.Helper() + + var buf [64]byte + _, err := rand.Read(buf[:]) + require.NoError(t, err) + signer, err := jose.NewSigner(jose.SigningKey{ + Algorithm: jose.HS512, + Key: buf[:], + }, nil) + require.NoError(t, err) + payload, err := json.Marshal(claims) + require.NoError(t, err) + signed, err := signer.Sign(payload) + require.NoError(t, err) + compact, err := signed.CompactSerialize() + require.NoError(t, err) + return compact +} + +func findCookie(cookies []*http.Cookie, name string) *http.Cookie { + for _, cookie := range cookies { + if cookie.Name == name { + return cookie + } + } + return nil +} diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 1b369cf6d6ef4..1aa4dfe91bdd0 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -13,11 +13,15 @@ import ( "golang.org/x/exp/slices" "golang.org/x/xerrors" + "github.com/go-jose/go-jose/v4/jwt" + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" @@ -35,12 +39,20 @@ type DBTokenProvider struct { DeploymentValues *codersdk.DeploymentValues OAuth2Configs *httpmw.OAuth2Configs WorkspaceAgentInactiveTimeout time.Duration - SigningKey SecurityKey + Keycache cryptokeys.SigningKeycache } var _ SignedTokenProvider = &DBTokenProvider{} -func NewDBTokenProvider(log slog.Logger, accessURL *url.URL, authz rbac.Authorizer, db database.Store, cfg *codersdk.DeploymentValues, oauth2Cfgs *httpmw.OAuth2Configs, workspaceAgentInactiveTimeout time.Duration, signingKey SecurityKey) SignedTokenProvider { +func NewDBTokenProvider(log slog.Logger, + 
accessURL *url.URL, + authz rbac.Authorizer, + db database.Store, + cfg *codersdk.DeploymentValues, + oauth2Cfgs *httpmw.OAuth2Configs, + workspaceAgentInactiveTimeout time.Duration, + signer cryptokeys.SigningKeycache, +) SignedTokenProvider { if workspaceAgentInactiveTimeout == 0 { workspaceAgentInactiveTimeout = 1 * time.Minute } @@ -53,12 +65,12 @@ func NewDBTokenProvider(log slog.Logger, accessURL *url.URL, authz rbac.Authoriz DeploymentValues: cfg, OAuth2Configs: oauth2Cfgs, WorkspaceAgentInactiveTimeout: workspaceAgentInactiveTimeout, - SigningKey: signingKey, + Keycache: signer, } } func (p *DBTokenProvider) FromRequest(r *http.Request) (*SignedToken, bool) { - return FromRequest(r, p.SigningKey) + return FromRequest(r, p.Keycache) } func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *http.Request, issueReq IssueTokenRequest) (*SignedToken, string, bool) { @@ -70,7 +82,7 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * dangerousSystemCtx := dbauthz.AsSystemRestricted(ctx) appReq := issueReq.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "invalid app request") return nil, "", false @@ -210,9 +222,11 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * return nil, "", false } + token.RegisteredClaims = jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(DefaultTokenExpiry)), + } // Sign the token. - token.Expiry = time.Now().Add(DefaultTokenExpiry) - tokenStr, err := p.SigningKey.SignToken(token) + tokenStr, err := jwtutils.Sign(ctx, p.Keycache, token) if err != nil { WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "generate token") return nil, "", false diff --git a/coderd/workspaceapps/db_test.go b/coderd/workspaceapps/db_test.go index 6c5a0212aff2b..bf364f1ce62b3 100644 --- a/coderd/workspaceapps/db_test.go +++ b/coderd/workspaceapps/db_test.go @@ -13,6 +13,7 @@ import ( "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -20,6 +21,7 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" @@ -94,8 +96,7 @@ func Test_ResolveRequest(t *testing.T) { _ = closer.Close() }) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitMedium) - t.Cleanup(cancel) + ctx := testutil.Context(t, testutil.WaitMedium) firstUser := coderdtest.CreateFirstUser(t, client) me, err := client.User(ctx, codersdk.Me) @@ -276,15 +277,17 @@ func Test_ResolveRequest(t *testing.T) { _ = w.Body.Close() require.Equal(t, &workspaceapps.SignedToken{ + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(token.Expiry.Time()), + }, Request: req, - Expiry: token.Expiry, // ignored to avoid flakiness UserID: me.ID, WorkspaceID: workspace.ID, AgentID: agentID, AppURL: appURL, }, token) require.NotZero(t, token.Expiry) - require.WithinDuration(t, time.Now().Add(workspaceapps.DefaultTokenExpiry), token.Expiry, time.Minute) + require.WithinDuration(t, time.Now().Add(workspaceapps.DefaultTokenExpiry), token.Expiry.Time(), time.Minute) // Check that the token was set in the 
response and is valid. require.Len(t, w.Cookies(), 1) @@ -292,10 +295,11 @@ func Test_ResolveRequest(t *testing.T) { require.Equal(t, codersdk.SignedAppTokenCookie, cookie.Name) require.Equal(t, req.BasePath, cookie.Path) - parsedToken, err := api.AppSecurityKey.VerifySignedToken(cookie.Value) + var parsedToken workspaceapps.SignedToken + err := jwtutils.Verify(ctx, api.AppSigningKeyCache, cookie.Value, &parsedToken) require.NoError(t, err) // normalize expiry - require.WithinDuration(t, token.Expiry, parsedToken.Expiry, 2*time.Second) + require.WithinDuration(t, token.Expiry.Time(), parsedToken.Expiry.Time(), 2*time.Second) parsedToken.Expiry = token.Expiry require.Equal(t, token, &parsedToken) @@ -314,7 +318,7 @@ func Test_ResolveRequest(t *testing.T) { }) require.True(t, ok) // normalize expiry - require.WithinDuration(t, token.Expiry, secondToken.Expiry, 2*time.Second) + require.WithinDuration(t, token.Expiry.Time(), secondToken.Expiry.Time(), 2*time.Second) secondToken.Expiry = token.Expiry require.Equal(t, token, secondToken) } @@ -540,13 +544,16 @@ func Test_ResolveRequest(t *testing.T) { // App name differs AppSlugOrPort: appNamePublic, }).Normalize(), - Expiry: time.Now().Add(time.Minute), + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(time.Minute)), + }, UserID: me.ID, WorkspaceID: workspace.ID, AgentID: agentID, AppURL: appURL, } - badTokenStr, err := api.AppSecurityKey.SignToken(badToken) + + badTokenStr, err := jwtutils.Sign(ctx, api.AppSigningKeyCache, badToken) require.NoError(t, err) req := (workspaceapps.Request{ @@ -589,7 +596,8 @@ func Test_ResolveRequest(t *testing.T) { require.Len(t, cookies, 1) require.Equal(t, cookies[0].Name, codersdk.SignedAppTokenCookie) require.NotEqual(t, cookies[0].Value, badTokenStr) - parsedToken, err := api.AppSecurityKey.VerifySignedToken(cookies[0].Value) + var parsedToken workspaceapps.SignedToken + err = jwtutils.Verify(ctx, api.AppSigningKeyCache, cookies[0].Value, &parsedToken) require.NoError(t, err) require.Equal(t, appNameOwner, parsedToken.AppSlugOrPort) }) diff --git a/coderd/workspaceapps/provider.go b/coderd/workspaceapps/provider.go index 8d4b7fd149800..1887036e35cbf 100644 --- a/coderd/workspaceapps/provider.go +++ b/coderd/workspaceapps/provider.go @@ -38,7 +38,7 @@ type ResolveRequestOptions struct { func ResolveRequest(rw http.ResponseWriter, r *http.Request, opts ResolveRequestOptions) (*SignedToken, bool) { appReq := opts.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { // This is a 500 since it's a coder server or proxy that's making this // request struct based on details from the request. 
The values should @@ -79,7 +79,7 @@ func ResolveRequest(rw http.ResponseWriter, r *http.Request, opts ResolveRequest Name: codersdk.SignedAppTokenCookie, Value: tokenStr, Path: appReq.BasePath, - Expires: token.Expiry, + Expires: token.Expiry.Time(), }) return token, true diff --git a/coderd/workspaceapps/proxy.go b/coderd/workspaceapps/proxy.go index c6cd01395db5c..a9c60357a009d 100644 --- a/coderd/workspaceapps/proxy.go +++ b/coderd/workspaceapps/proxy.go @@ -11,17 +11,21 @@ import ( "strconv" "strings" "sync" + "time" "github.com/go-chi/chi/v5" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "go.opentelemetry.io/otel/trace" "nhooyr.io/websocket" "cdr.dev/slog" "github.com/coder/coder/v2/agent/agentssh" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" @@ -97,8 +101,8 @@ type Server struct { HostnameRegex *regexp.Regexp RealIPConfig *httpmw.RealIPConfig - SignedTokenProvider SignedTokenProvider - AppSecurityKey SecurityKey + SignedTokenProvider SignedTokenProvider + APIKeyEncryptionKeycache cryptokeys.EncryptionKeycache // DisablePathApps disables path-based apps. This is a security feature as path // based apps share the same cookie as the dashboard, and are susceptible to XSS @@ -176,7 +180,10 @@ func (s *Server) handleAPIKeySmuggling(rw http.ResponseWriter, r *http.Request, } // Exchange the encoded API key for a real one. - token, err := s.AppSecurityKey.DecryptAPIKey(encryptedAPIKey) + var payload EncryptedAPIKeyPayload + err := jwtutils.Decrypt(ctx, s.APIKeyEncryptionKeycache, encryptedAPIKey, &payload, jwtutils.WithDecryptExpected(jwt.Expected{ + Time: time.Now(), + })) if err != nil { s.Logger.Debug(ctx, "could not decrypt smuggled workspace app API key", slog.Error(err)) site.RenderStaticErrorPage(rw, r, site.ErrorPageData{ @@ -225,7 +232,7 @@ func (s *Server) handleAPIKeySmuggling(rw http.ResponseWriter, r *http.Request, // server using the wrong value. http.SetCookie(rw, &http.Cookie{ Name: AppConnectSessionTokenCookieName(accessMethod), - Value: token, + Value: payload.APIKey, Domain: domain, Path: "/", MaxAge: 0, diff --git a/coderd/workspaceapps/request.go b/coderd/workspaceapps/request.go index 4f6a6f3a64e65..0833ab731fe67 100644 --- a/coderd/workspaceapps/request.go +++ b/coderd/workspaceapps/request.go @@ -124,9 +124,9 @@ func (r Request) Normalize() Request { return req } -// Validate ensures the request is correct and contains the necessary +// Check ensures the request is correct and contains the necessary // parameters. 
-func (r Request) Validate() error { +func (r Request) Check() error { switch r.AccessMethod { case AccessMethodPath, AccessMethodSubdomain, AccessMethodTerminal: default: diff --git a/coderd/workspaceapps/request_test.go b/coderd/workspaceapps/request_test.go index b6e4bb7a2e65f..fbabc840745e9 100644 --- a/coderd/workspaceapps/request_test.go +++ b/coderd/workspaceapps/request_test.go @@ -279,7 +279,7 @@ func Test_RequestValidate(t *testing.T) { if !c.noNormalize { req = c.req.Normalize() } - err := req.Validate() + err := req.Check() if c.errContains == "" { require.NoError(t, err) } else { diff --git a/coderd/workspaceapps/token.go b/coderd/workspaceapps/token.go index 33428b0e25f13..dcd8c5a0e5c34 100644 --- a/coderd/workspaceapps/token.go +++ b/coderd/workspaceapps/token.go @@ -1,35 +1,27 @@ package workspaceapps import ( - "encoding/base64" - "encoding/hex" - "encoding/json" "net/http" "strings" "time" - "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "golang.org/x/xerrors" - "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" ) -const ( - tokenSigningAlgorithm = jose.HS512 - apiKeyEncryptionAlgorithm = jose.A256GCMKW -) - // SignedToken is the struct data contained inside a workspace app JWE. It // contains the details of the workspace app that the token is valid for to // avoid database queries. type SignedToken struct { + jwtutils.RegisteredClaims // Request details. Request `json:"request"` - // Trusted resolved details. - Expiry time.Time `json:"expiry"` // set by GenerateToken if unset UserID uuid.UUID `json:"user_id"` WorkspaceID uuid.UUID `json:"workspace_id"` AgentID uuid.UUID `json:"agent_id"` @@ -57,191 +49,32 @@ func (t SignedToken) MatchesRequest(req Request) bool { t.AppSlugOrPort == req.AppSlugOrPort } -// SecurityKey is used for signing and encrypting app tokens and API keys. -// -// The first 64 bytes of the key are used for signing tokens with HMAC-SHA256, -// and the last 32 bytes are used for encrypting API keys with AES-256-GCM. -// We use a single key for both operations to avoid having to store and manage -// two keys. -type SecurityKey [96]byte - -func (k SecurityKey) IsZero() bool { - return k == SecurityKey{} -} - -func (k SecurityKey) String() string { - return hex.EncodeToString(k[:]) -} - -func (k SecurityKey) signingKey() []byte { - return k[:64] -} - -func (k SecurityKey) encryptionKey() []byte { - return k[64:] -} - -func KeyFromString(str string) (SecurityKey, error) { - var key SecurityKey - decoded, err := hex.DecodeString(str) - if err != nil { - return key, xerrors.Errorf("decode key: %w", err) - } - if len(decoded) != len(key) { - return key, xerrors.Errorf("expected key to be %d bytes, got %d", len(key), len(decoded)) - } - copy(key[:], decoded) - - return key, nil -} - -// SignToken generates a signed workspace app token with the given payload. If -// the payload doesn't have an expiry, it will be set to the current time plus -// the default expiry. 
-func (k SecurityKey) SignToken(payload SignedToken) (string, error) { - if payload.Expiry.IsZero() { - payload.Expiry = time.Now().Add(DefaultTokenExpiry) - } - payloadBytes, err := json.Marshal(payload) - if err != nil { - return "", xerrors.Errorf("marshal payload to JSON: %w", err) - } - - signer, err := jose.NewSigner(jose.SigningKey{ - Algorithm: tokenSigningAlgorithm, - Key: k.signingKey(), - }, nil) - if err != nil { - return "", xerrors.Errorf("create signer: %w", err) - } - - signedObject, err := signer.Sign(payloadBytes) - if err != nil { - return "", xerrors.Errorf("sign payload: %w", err) - } - - serialized, err := signedObject.CompactSerialize() - if err != nil { - return "", xerrors.Errorf("serialize JWS: %w", err) - } - - return serialized, nil -} - -// VerifySignedToken parses a signed workspace app token with the given key and -// returns the payload. If the token is invalid or expired, an error is -// returned. -func (k SecurityKey) VerifySignedToken(str string) (SignedToken, error) { - object, err := jose.ParseSigned(str) - if err != nil { - return SignedToken{}, xerrors.Errorf("parse JWS: %w", err) - } - if len(object.Signatures) != 1 { - return SignedToken{}, xerrors.New("expected 1 signature") - } - if object.Signatures[0].Header.Algorithm != string(tokenSigningAlgorithm) { - return SignedToken{}, xerrors.Errorf("expected token signing algorithm to be %q, got %q", tokenSigningAlgorithm, object.Signatures[0].Header.Algorithm) - } - - output, err := object.Verify(k.signingKey()) - if err != nil { - return SignedToken{}, xerrors.Errorf("verify JWS: %w", err) - } - - var tok SignedToken - err = json.Unmarshal(output, &tok) - if err != nil { - return SignedToken{}, xerrors.Errorf("unmarshal payload: %w", err) - } - if tok.Expiry.Before(time.Now()) { - return SignedToken{}, xerrors.New("signed app token expired") - } - - return tok, nil -} - type EncryptedAPIKeyPayload struct { - APIKey string `json:"api_key"` - ExpiresAt time.Time `json:"expires_at"` + jwtutils.RegisteredClaims + APIKey string `json:"api_key"` } -// EncryptAPIKey encrypts an API key for subdomain token smuggling. -func (k SecurityKey) EncryptAPIKey(payload EncryptedAPIKeyPayload) (string, error) { - if payload.APIKey == "" { - return "", xerrors.New("API key is empty") - } - if payload.ExpiresAt.IsZero() { - // Very short expiry as these keys are only used once as part of an - // automatic redirection flow. - payload.ExpiresAt = dbtime.Now().Add(time.Minute) - } - - payloadBytes, err := json.Marshal(payload) - if err != nil { - return "", xerrors.Errorf("marshal payload: %w", err) - } - - // JWEs seem to apply a nonce themselves. - encrypter, err := jose.NewEncrypter( - jose.A256GCM, - jose.Recipient{ - Algorithm: apiKeyEncryptionAlgorithm, - Key: k.encryptionKey(), - }, - &jose.EncrypterOptions{ - Compression: jose.DEFLATE, - }, - ) - if err != nil { - return "", xerrors.Errorf("initializer jose encrypter: %w", err) - } - encryptedObject, err := encrypter.Encrypt(payloadBytes) - if err != nil { - return "", xerrors.Errorf("encrypt jwe: %w", err) - } - - encrypted := encryptedObject.FullSerialize() - return base64.RawURLEncoding.EncodeToString([]byte(encrypted)), nil +func (e *EncryptedAPIKeyPayload) Fill(now time.Time) { + e.Issuer = "coderd" + e.Audience = jwt.Audience{"wsproxy"} + e.Expiry = jwt.NewNumericDate(now.Add(time.Minute)) + e.NotBefore = jwt.NewNumericDate(now.Add(-time.Minute)) } -// DecryptAPIKey undoes EncryptAPIKey and is used in the subdomain app handler. 
-func (k SecurityKey) DecryptAPIKey(encryptedAPIKey string) (string, error) { - encrypted, err := base64.RawURLEncoding.DecodeString(encryptedAPIKey) - if err != nil { - return "", xerrors.Errorf("base64 decode encrypted API key: %w", err) +func (e EncryptedAPIKeyPayload) Validate(ex jwt.Expected) error { + if e.NotBefore == nil { + return xerrors.Errorf("not before is required") } - object, err := jose.ParseEncrypted(string(encrypted)) - if err != nil { - return "", xerrors.Errorf("parse encrypted API key: %w", err) - } - if object.Header.Algorithm != string(apiKeyEncryptionAlgorithm) { - return "", xerrors.Errorf("expected API key encryption algorithm to be %q, got %q", apiKeyEncryptionAlgorithm, object.Header.Algorithm) - } - - // Decrypt using the hashed secret. - decrypted, err := object.Decrypt(k.encryptionKey()) - if err != nil { - return "", xerrors.Errorf("decrypt API key: %w", err) - } - - // Unmarshal the payload. - var payload EncryptedAPIKeyPayload - if err := json.Unmarshal(decrypted, &payload); err != nil { - return "", xerrors.Errorf("unmarshal decrypted payload: %w", err) - } - - // Validate expiry. - if payload.ExpiresAt.Before(dbtime.Now()) { - return "", xerrors.New("encrypted API key expired") - } + ex.Issuer = "coderd" + ex.AnyAudience = jwt.Audience{"wsproxy"} - return payload.APIKey, nil + return e.RegisteredClaims.Validate(ex) } // FromRequest returns the signed token from the request, if it exists and is // valid. The caller must check that the token matches the request. -func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { +func FromRequest(r *http.Request, mgr cryptokeys.SigningKeycache) (*SignedToken, bool) { // Get all signed app tokens from the request. This includes the query // parameter and all matching cookies sent with the request. If there are // somehow multiple signed app token cookies, we want to try all of them @@ -270,8 +103,12 @@ func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { tokens = tokens[:4] } + ctx := r.Context() for _, tokenStr := range tokens { - token, err := key.VerifySignedToken(tokenStr) + var token SignedToken + err := jwtutils.Verify(ctx, mgr, tokenStr, &token, jwtutils.WithVerifyExpected(jwt.Expected{ + Time: time.Now(), + })) if err == nil { req := token.Request.Normalize() if hasQueryParam && req.AccessMethod != AccessMethodTerminal { @@ -280,7 +117,7 @@ func FromRequest(r *http.Request, key SecurityKey) (*SignedToken, bool) { return nil, false } - err := req.Validate() + err := req.Check() if err == nil { // The request has a valid signed app token, which is a valid // token signed by us. 
The caller must check that it matches diff --git a/coderd/workspaceapps/token_test.go b/coderd/workspaceapps/token_test.go index c656ae2ab77b8..db070268fa196 100644 --- a/coderd/workspaceapps/token_test.go +++ b/coderd/workspaceapps/token_test.go @@ -1,22 +1,22 @@ package workspaceapps_test import ( - "fmt" + "crypto/rand" "net/http" "net/http/httptest" "testing" "time" + "github.com/go-jose/go-jose/v4/jwt" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" - "github.com/go-jose/go-jose/v3" "github.com/google/uuid" "github.com/stretchr/testify/require" - "github.com/coder/coder/v2/coderd/coderdtest" - "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" - "github.com/coder/coder/v2/cryptorand" ) func Test_TokenMatchesRequest(t *testing.T) { @@ -283,129 +283,6 @@ func Test_TokenMatchesRequest(t *testing.T) { } } -func Test_GenerateToken(t *testing.T) { - t.Parallel() - - t.Run("SetExpiry", func(t *testing.T) { - t.Parallel() - - tokenStr, err := coderdtest.AppSecurityKey.SignToken(workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Time{}, - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }) - require.NoError(t, err) - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(tokenStr) - require.NoError(t, err) - - require.WithinDuration(t, time.Now().Add(time.Minute), token.Expiry, 15*time.Second) - }) - - future := time.Now().Add(time.Hour) - cases := []struct { - name string - token workspaceapps.SignedToken - parseErrContains string - }{ - { - name: "OK1", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: future, - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }, - }, - { - name: "OK2", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodSubdomain, - BasePath: "/", - UsernameOrID: "oof", - WorkspaceNameOrID: "rab", - AgentNameOrID: "zab", - AppSlugOrPort: "xuq", - }, - - Expiry: future, - UserID: uuid.MustParse("6fa684a3-11aa-49fd-8512-ab527bd9b900"), - WorkspaceID: uuid.MustParse("b2d816cc-505c-441d-afdf-dae01781bc0b"), - AgentID: uuid.MustParse("6c4396e1-af88-4a8a-91a3-13ea54fc29fb"), - AppURL: "http://localhost:9090", - }, - }, - { - name: "Expired", - token: workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodSubdomain, - BasePath: "/", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Now().Add(-time.Hour), - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - 
AppURL: "http://127.0.0.1:8080", - }, - parseErrContains: "token expired", - }, - } - - for _, c := range cases { - c := c - - t.Run(c.name, func(t *testing.T) { - t.Parallel() - - str, err := coderdtest.AppSecurityKey.SignToken(c.token) - require.NoError(t, err) - - // Tokens aren't deterministic as they have a random nonce, so we - // can't compare them directly. - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(str) - if c.parseErrContains != "" { - require.Error(t, err) - require.ErrorContains(t, err, c.parseErrContains) - } else { - require.NoError(t, err) - // normalize the expiry - require.WithinDuration(t, c.token.Expiry, token.Expiry, 10*time.Second) - c.token.Expiry = token.Expiry - require.Equal(t, c.token, token) - } - }) - } -} - func Test_FromRequest(t *testing.T) { t.Parallel() @@ -419,7 +296,13 @@ func Test_FromRequest(t *testing.T) { Value: "invalid", }) + ctx := testutil.Context(t, testutil.WaitShort) + signer := newSigner(t) + token := workspaceapps.SignedToken{ + RegisteredClaims: jwtutils.RegisteredClaims{ + Expiry: jwt.NewNumericDate(time.Now().Add(time.Hour)), + }, Request: workspaceapps.Request{ AccessMethod: workspaceapps.AccessMethodSubdomain, BasePath: "/", @@ -429,7 +312,6 @@ func Test_FromRequest(t *testing.T) { AgentNameOrID: "agent", AppSlugOrPort: "app", }, - Expiry: time.Now().Add(time.Hour), UserID: uuid.New(), WorkspaceID: uuid.New(), AgentID: uuid.New(), @@ -438,16 +320,15 @@ func Test_FromRequest(t *testing.T) { // Add an expired cookie expired := token - expired.Expiry = time.Now().Add(time.Hour * -1) - expiredStr, err := coderdtest.AppSecurityKey.SignToken(token) + expired.RegisteredClaims.Expiry = jwt.NewNumericDate(time.Now().Add(time.Hour * -1)) + expiredStr, err := jwtutils.Sign(ctx, signer, expired) require.NoError(t, err) r.AddCookie(&http.Cookie{ Name: codersdk.SignedAppTokenCookie, Value: expiredStr, }) - // Add a valid token - validStr, err := coderdtest.AppSecurityKey.SignToken(token) + validStr, err := jwtutils.Sign(ctx, signer, token) require.NoError(t, err) r.AddCookie(&http.Cookie{ @@ -455,147 +336,27 @@ func Test_FromRequest(t *testing.T) { Value: validStr, }) - signed, ok := workspaceapps.FromRequest(r, coderdtest.AppSecurityKey) + signed, ok := workspaceapps.FromRequest(r, signer) require.True(t, ok, "expected a token to be found") // Confirm it is the correct token. require.Equal(t, signed.UserID, token.UserID) }) } -// The ParseToken fn is tested quite thoroughly in the GenerateToken test as -// well. -func Test_ParseToken(t *testing.T) { - t.Parallel() - - t.Run("InvalidJWS", func(t *testing.T) { - t.Parallel() - - token, err := coderdtest.AppSecurityKey.VerifySignedToken("invalid") - require.Error(t, err) - require.ErrorContains(t, err, "parse JWS") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) - - t.Run("VerifySignature", func(t *testing.T) { - t.Parallel() +func newSigner(t *testing.T) jwtutils.StaticKey { + t.Helper() - // Create a valid token using a different key. 
- var otherKey workspaceapps.SecurityKey - copy(otherKey[:], coderdtest.AppSecurityKey[:]) - for i := range otherKey { - otherKey[i] ^= 0xff - } - require.NotEqual(t, coderdtest.AppSecurityKey, otherKey) - - tokenStr, err := otherKey.SignToken(workspaceapps.SignedToken{ - Request: workspaceapps.Request{ - AccessMethod: workspaceapps.AccessMethodPath, - BasePath: "/app", - UsernameOrID: "foo", - WorkspaceNameOrID: "bar", - AgentNameOrID: "baz", - AppSlugOrPort: "qux", - }, - - Expiry: time.Now().Add(time.Hour), - UserID: uuid.MustParse("b1530ba9-76f3-415e-b597-4ddd7cd466a4"), - WorkspaceID: uuid.MustParse("1e6802d3-963e-45ac-9d8c-bf997016ffed"), - AgentID: uuid.MustParse("9ec18681-d2c9-4c9e-9186-f136efb4edbe"), - AppURL: "http://127.0.0.1:8080", - }) - require.NoError(t, err) - - // Verify the token is invalid. - token, err := coderdtest.AppSecurityKey.VerifySignedToken(tokenStr) - require.Error(t, err) - require.ErrorContains(t, err, "verify JWS") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) - - t.Run("InvalidBody", func(t *testing.T) { - t.Parallel() - - // Create a signature for an invalid body. - signer, err := jose.NewSigner(jose.SigningKey{Algorithm: jose.HS512, Key: coderdtest.AppSecurityKey[:64]}, nil) - require.NoError(t, err) - signedObject, err := signer.Sign([]byte("hi")) - require.NoError(t, err) - serialized, err := signedObject.CompactSerialize() - require.NoError(t, err) - - token, err := coderdtest.AppSecurityKey.VerifySignedToken(serialized) - require.Error(t, err) - require.ErrorContains(t, err, "unmarshal payload") - require.Equal(t, workspaceapps.SignedToken{}, token) - }) -} - -func TestAPIKeyEncryption(t *testing.T) { - t.Parallel() - - genAPIKey := func(t *testing.T) string { - id, _ := cryptorand.String(10) - secret, _ := cryptorand.String(22) - - return fmt.Sprintf("%s-%s", id, secret) + return jwtutils.StaticKey{ + ID: "test", + Key: generateSecret(t, 64), } +} - t.Run("OK", func(t *testing.T) { - t.Parallel() - - key := genAPIKey(t) - encrypted, err := coderdtest.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - }) - require.NoError(t, err) - - decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.NoError(t, err) - require.Equal(t, key, decryptedKey) - }) - - t.Run("Verifies", func(t *testing.T) { - t.Parallel() - - t.Run("Expiry", func(t *testing.T) { - t.Parallel() - - key := genAPIKey(t) - encrypted, err := coderdtest.AppSecurityKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - ExpiresAt: dbtime.Now().Add(-1 * time.Hour), - }) - require.NoError(t, err) - - decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.Error(t, err) - require.ErrorContains(t, err, "expired") - require.Empty(t, decryptedKey) - }) - - t.Run("EncryptionKey", func(t *testing.T) { - t.Parallel() - - // Create a valid token using a different key. - var otherKey workspaceapps.SecurityKey - copy(otherKey[:], coderdtest.AppSecurityKey[:]) - for i := range otherKey { - otherKey[i] ^= 0xff - } - require.NotEqual(t, coderdtest.AppSecurityKey, otherKey) - - // Encrypt with the other key. - key := genAPIKey(t) - encrypted, err := otherKey.EncryptAPIKey(workspaceapps.EncryptedAPIKeyPayload{ - APIKey: key, - }) - require.NoError(t, err) +func generateSecret(t *testing.T, size int) []byte { + t.Helper() - // Decrypt with the original key. 
- decryptedKey, err := coderdtest.AppSecurityKey.DecryptAPIKey(encrypted) - require.Error(t, err) - require.ErrorContains(t, err, "decrypt API key") - require.Empty(t, decryptedKey) - }) - }) + secret := make([]byte, size) + _, err := rand.Read(secret) + require.NoError(t, err) + return secret } diff --git a/coderd/workspaceapps_test.go b/coderd/workspaceapps_test.go index 1d00b7daa7bd9..52b3e18b4e6ad 100644 --- a/coderd/workspaceapps_test.go +++ b/coderd/workspaceapps_test.go @@ -5,16 +5,23 @@ import ( "net/http" "net/url" "testing" + "time" + "github.com/go-jose/go-jose/v4/jwt" "github.com/stretchr/testify/require" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) func TestGetAppHost(t *testing.T) { @@ -181,16 +188,28 @@ func TestWorkspaceApplicationAuth(t *testing.T) { t.Run(c.name, func(t *testing.T) { t.Parallel() - db, pubsub := dbtestutil.NewDB(t) - + ctx := testutil.Context(t, testutil.WaitMedium) + logger := slogtest.Make(t, nil) accessURL, err := url.Parse(c.accessURL) require.NoError(t, err) + db, ps := dbtestutil.NewDB(t) + fetcher := &cryptokeys.DBFetcher{ + DB: db, + } + + kc, err := cryptokeys.NewEncryptionCache(ctx, logger, fetcher, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) + require.NoError(t, err) + + clock := quartz.NewMock(t) + client := coderdtest.New(t, &coderdtest.Options{ - Database: db, - Pubsub: pubsub, - AccessURL: accessURL, - AppHostname: c.appHostname, + AccessURL: accessURL, + AppHostname: c.appHostname, + Database: db, + Pubsub: ps, + APIKeyEncryptionCache: kc, + Clock: clock, }) _ = coderdtest.CreateFirstUser(t, client) @@ -240,7 +259,15 @@ func TestWorkspaceApplicationAuth(t *testing.T) { loc.RawQuery = q.Encode() require.Equal(t, c.expectRedirect, loc.String()) - // The decrypted key is verified in the apptest test suite. + var token workspaceapps.EncryptedAPIKeyPayload + err = jwtutils.Decrypt(ctx, kc, encryptedAPIKey, &token, jwtutils.WithDecryptExpected(jwt.Expected{ + Time: clock.Now(), + AnyAudience: jwt.Audience{"wsproxy"}, + Issuer: "coderd", + })) + require.NoError(t, err) + require.Equal(t, jwt.NewNumericDate(clock.Now().Add(time.Minute)), token.Expiry) + require.Equal(t, jwt.NewNumericDate(clock.Now().Add(-time.Minute)), token.NotBefore) }) } } diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 6394deb000d52..6a5f7c52ac8f5 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -3113,9 +3113,11 @@ func (c *Client) SSHConfiguration(ctx context.Context) (SSHConfigResponse, error type CryptoKeyFeature string const ( - CryptoKeyFeatureWorkspaceApp CryptoKeyFeature = "workspace_apps" - CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" - CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" + CryptoKeyFeatureWorkspaceAppsAPIKey CryptoKeyFeature = "workspace_apps_api_key" + //nolint:gosec // This denotes a type of key, not a literal. 
+ CryptoKeyFeatureWorkspaceAppsToken CryptoKeyFeature = "workspace_apps_token" + CryptoKeyFeatureOIDCConvert CryptoKeyFeature = "oidc_convert" + CryptoKeyFeatureTailnetResume CryptoKeyFeature = "tailnet_resume" ) type CryptoKey struct { diff --git a/codersdk/workspacesdk/connector_internal_test.go b/codersdk/workspacesdk/connector_internal_test.go index 7a339a0079ba2..19f1930c89bc5 100644 --- a/codersdk/workspacesdk/connector_internal_test.go +++ b/codersdk/workspacesdk/connector_internal_test.go @@ -25,6 +25,7 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/apiversion" "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/tailnet/proto" @@ -61,7 +62,7 @@ func TestTailnetAPIConnector_Disconnects(t *testing.T) { CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func([]*proto.TelemetryEvent) {}, ResumeTokenProvider: tailnet.NewInsecureTestResumeTokenProvider(), }) require.NoError(t, err) @@ -165,13 +166,17 @@ func TestTailnetAPIConnector_ResumeToken(t *testing.T) { clock := quartz.NewMock(t) resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour) + mgr := jwtutils.StaticKey{ + ID: "123", + Key: resumeTokenSigningKey[:], + } + resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour) svc, err := tailnet.NewClientService(tailnet.ClientServiceOptions{ Logger: logger, CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func([]*proto.TelemetryEvent) {}, ResumeTokenProvider: resumeTokenProvider, }) require.NoError(t, err) @@ -190,7 +195,7 @@ func TestTailnetAPIConnector_ResumeToken(t *testing.T) { t.Logf("received resume token: %s", resumeToken) assert.Equal(t, expectResumeToken, resumeToken) if resumeToken != "" { - peerID, err = resumeTokenProvider.VerifyResumeToken(resumeToken) + peerID, err = resumeTokenProvider.VerifyResumeToken(ctx, resumeToken) assert.NoError(t, err, "failed to parse resume token") if err != nil { httpapi.Write(ctx, w, http.StatusUnauthorized, codersdk.Response{ @@ -280,13 +285,17 @@ func TestTailnetAPIConnector_ResumeTokenFailure(t *testing.T) { clock := quartz.NewMock(t) resumeTokenSigningKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(resumeTokenSigningKey, clock, time.Hour) + mgr := jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: resumeTokenSigningKey[:], + } + resumeTokenProvider := tailnet.NewResumeTokenKeyProvider(mgr, clock, time.Hour) svc, err := tailnet.NewClientService(tailnet.ClientServiceOptions{ Logger: logger, CoordPtr: &coordPtr, DERPMapUpdateFrequency: time.Millisecond, DERPMapFn: func() *tailcfg.DERPMap { return <-derpMapCh }, - NetworkTelemetryHandler: func(batch []*proto.TelemetryEvent) {}, + NetworkTelemetryHandler: func(_ []*proto.TelemetryEvent) {}, ResumeTokenProvider: resumeTokenProvider, }) require.NoError(t, err) diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 
ed3800b3a27cd..f4e683305029b 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -1454,7 +1454,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ```json { "deletes_at": "2019-08-24T14:15:22Z", - "feature": "workspace_apps", + "feature": "workspace_apps_api_key", "secret": "string", "sequence": 0, "starts_at": "2019-08-24T14:15:22Z" @@ -1474,18 +1474,19 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ## codersdk.CryptoKeyFeature ```json -"workspace_apps" +"workspace_apps_api_key" ``` ### Properties #### Enumerated Values -| Value | -| ---------------- | -| `workspace_apps` | -| `oidc_convert` | -| `tailnet_resume` | +| Value | +| ------------------------ | +| `workspace_apps_api_key` | +| `workspace_apps_token` | +| `oidc_convert` | +| `tailnet_resume` | ## codersdk.CustomRoleRequest @@ -9893,7 +9894,7 @@ _None_ "crypto_keys": [ { "deletes_at": "2019-08-24T14:15:22Z", - "feature": "workspace_apps", + "feature": "workspace_apps_api_key", "secret": "string", "sequence": 0, "starts_at": "2019-08-24T14:15:22Z" @@ -9971,7 +9972,6 @@ _None_ ```json { - "app_security_key": "string", "derp_force_websockets": true, "derp_map": { "homeParams": { @@ -10052,7 +10052,6 @@ _None_ | Name | Type | Required | Restrictions | Description | | ----------------------- | --------------------------------------------- | -------- | ------------ | -------------------------------------------------------------------------------------- | -| `app_security_key` | string | false | | | | `derp_force_websockets` | boolean | false | | | | `derp_map` | [tailcfg.DERPMap](#tailcfgderpmap) | false | | | | `derp_mesh_key` | string | false | | | diff --git a/enterprise/coderd/coderdenttest/proxytest.go b/enterprise/coderd/coderdenttest/proxytest.go index 6e5a822bdf251..a6f2c7384b16f 100644 --- a/enterprise/coderd/coderdenttest/proxytest.go +++ b/enterprise/coderd/coderdenttest/proxytest.go @@ -65,6 +65,8 @@ type WorkspaceProxy struct { // owner client. If a token is provided, the proxy will become a replica of the // existing proxy region. func NewWorkspaceProxyReplica(t *testing.T, coderdAPI *coderd.API, owner *codersdk.Client, options *ProxyOptions) WorkspaceProxy { + t.Helper() + ctx, cancelFunc := context.WithCancel(context.Background()) t.Cleanup(cancelFunc) @@ -142,8 +144,10 @@ func NewWorkspaceProxyReplica(t *testing.T, coderdAPI *coderd.API, owner *coders statsCollectorOptions.Flush = options.FlushStats } + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug).With(slog.F("server_url", serverURL.String())) + wssrv, err := wsproxy.New(ctx, &wsproxy.Options{ - Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug).With(slog.F("server_url", serverURL.String())), + Logger: logger, Experiments: options.Experiments, DashboardURL: coderdAPI.AccessURL, AccessURL: accessURL, diff --git a/enterprise/coderd/workspaceproxy.go b/enterprise/coderd/workspaceproxy.go index 47bdf53493489..4008de69e4faa 100644 --- a/enterprise/coderd/workspaceproxy.go +++ b/enterprise/coderd/workspaceproxy.go @@ -7,6 +7,7 @@ import ( "fmt" "net/http" "net/url" + "slices" "strings" "time" @@ -33,6 +34,13 @@ import ( "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" ) +// whitelistedCryptoKeyFeatures is a list of crypto key features that are +// allowed to be queried with workspace proxies. 
+var whitelistedCryptoKeyFeatures = []database.CryptoKeyFeature{ + database.CryptoKeyFeatureWorkspaceAppsToken, + database.CryptoKeyFeatureWorkspaceAppsAPIKey, +} + // forceWorkspaceProxyHealthUpdate forces an update of the proxy health. // This is useful when a proxy is created or deleted. Errors will be logged. func (api *API) forceWorkspaceProxyHealthUpdate(ctx context.Context) { @@ -700,7 +708,6 @@ func (api *API) workspaceProxyRegister(rw http.ResponseWriter, r *http.Request) } httpapi.Write(ctx, rw, http.StatusCreated, wsproxysdk.RegisterWorkspaceProxyResponse{ - AppSecurityKey: api.AppSecurityKey.String(), DERPMeshKey: api.DERPServer.MeshKey(), DERPRegionID: regionID, DERPMap: api.AGPL.DERPMap(), @@ -721,13 +728,29 @@ func (api *API) workspaceProxyRegister(rw http.ResponseWriter, r *http.Request) // @Security CoderSessionToken // @Produce json // @Tags Enterprise +// @Param feature query string true "Feature key" // @Success 200 {object} wsproxysdk.CryptoKeysResponse // @Router /workspaceproxies/me/crypto-keys [get] // @x-apidocgen {"skip": true} func (api *API) workspaceProxyCryptoKeys(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - keys, err := api.Database.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + feature := database.CryptoKeyFeature(r.URL.Query().Get("feature")) + if feature == "" { + httpapi.Write(r.Context(), rw, http.StatusBadRequest, codersdk.Response{ + Message: "Missing feature query parameter.", + }) + return + } + + if !slices.Contains(whitelistedCryptoKeyFeatures, feature) { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Invalid feature: %q", feature), + }) + return + } + + keys, err := api.Database.GetCryptoKeysByFeature(ctx, feature) if err != nil { httpapi.InternalServerError(rw, err) return diff --git a/enterprise/coderd/workspaceproxy_test.go b/enterprise/coderd/workspaceproxy_test.go index 5231a0b0c4241..0be112b532b7a 100644 --- a/enterprise/coderd/workspaceproxy_test.go +++ b/enterprise/coderd/workspaceproxy_test.go @@ -320,7 +320,6 @@ func TestProxyRegisterDeregister(t *testing.T) { } registerRes1, err := proxyClient.RegisterWorkspaceProxy(ctx, req) require.NoError(t, err) - require.NotEmpty(t, registerRes1.AppSecurityKey) require.NotEmpty(t, registerRes1.DERPMeshKey) require.EqualValues(t, 10001, registerRes1.DERPRegionID) require.Empty(t, registerRes1.SiblingReplicas) @@ -609,11 +608,8 @@ func TestProxyRegisterDeregister(t *testing.T) { func TestIssueSignedAppToken(t *testing.T) { t.Parallel() - db, pubsub := dbtestutil.NewDB(t) client, user := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ - Database: db, - Pubsub: pubsub, IncludeProvisionerDaemon: true, }, LicenseOptions: &coderdenttest.LicenseOptions{ @@ -716,6 +712,10 @@ func TestReconnectingPTYSignedToken(t *testing.T) { closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, + }) + // Create a workspace + apps authToken := uuid.NewString() version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ @@ -915,51 +915,86 @@ func TestGetCryptoKeys(t *testing.T) { now := time.Now() expectedKey1 := dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-time.Hour), Sequence: 2, }) - key1 := db2sdk.CryptoKey(expectedKey1) + encryptionKey := db2sdk.CryptoKey(expectedKey1) expectedKey2 := 
dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, StartsAt: now, Sequence: 3, }) - key2 := db2sdk.CryptoKey(expectedKey2) + signingKey := db2sdk.CryptoKey(expectedKey2) // Create a deleted key. _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, StartsAt: now.Add(-time.Hour), Secret: sql.NullString{ String: "secret1", Valid: false, }, - Sequence: 1, - }) - - // Create a key with different features. - _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureTailnetResume, - StartsAt: now.Add(-time.Hour), - Sequence: 1, - }) - _ = dbgen.CryptoKey(t, db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureOidcConvert, - StartsAt: now.Add(-time.Hour), - Sequence: 1, + Sequence: 4, }) proxy := coderdenttest.NewWorkspaceProxyReplica(t, api, cclient, &coderdenttest.ProxyOptions{ Name: testutil.GetRandomName(t), }) - keys, err := proxy.SDKClient.CryptoKeys(ctx) + keys, err := proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.NotEmpty(t, keys) + // 1 key is generated on startup, the other we manually generated. require.Equal(t, 2, len(keys.CryptoKeys)) - requireContainsKeys(t, keys.CryptoKeys, key1, key2) + requireContainsKeys(t, keys.CryptoKeys, encryptionKey) + requireNotContainsKeys(t, keys.CryptoKeys, signingKey) + + keys, err = proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsToken) + require.NoError(t, err) + require.NotEmpty(t, keys) + // 1 key is generated on startup, the other we manually generated. + require.Equal(t, 2, len(keys.CryptoKeys)) + requireContainsKeys(t, keys.CryptoKeys, signingKey) + requireNotContainsKeys(t, keys.CryptoKeys, encryptionKey) + }) + + t.Run("InvalidFeature", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitMedium) + db, pubsub := dbtestutil.NewDB(t) + cclient, _, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: pubsub, + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceProxy: 1, + }, + }, + }) + + proxy := coderdenttest.NewWorkspaceProxyReplica(t, api, cclient, &coderdenttest.ProxyOptions{ + Name: testutil.GetRandomName(t), + }) + + _, err := proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureOIDCConvert) + require.Error(t, err) + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + _, err = proxy.SDKClient.CryptoKeys(ctx, codersdk.CryptoKeyFeatureTailnetResume) + require.Error(t, err) + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + _, err = proxy.SDKClient.CryptoKeys(ctx, "invalid") + require.Error(t, err) + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) }) t.Run("Unauthorized", func(t *testing.T) { @@ -987,7 +1022,7 @@ func TestGetCryptoKeys(t *testing.T) { client := wsproxysdk.New(cclient.URL) client.SetSessionToken(cclient.SessionToken()) - _, err := client.CryptoKeys(ctx) + _, err := client.CryptoKeys(ctx, codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey) require.Error(t, err) var sdkErr *codersdk.Error require.ErrorAs(t, err, &sdkErr) @@ -995,6 +1030,18 @@ func TestGetCryptoKeys(t 
*testing.T) { }) } +func requireNotContainsKeys(t *testing.T, keys []codersdk.CryptoKey, unexpected ...codersdk.CryptoKey) { + t.Helper() + + for _, unexpectedKey := range unexpected { + for _, key := range keys { + if key.Feature == unexpectedKey.Feature && key.Sequence == unexpectedKey.Sequence { + t.Fatalf("unexpected key %+v found", unexpectedKey) + } + } + } +} + func requireContainsKeys(t *testing.T, keys []codersdk.CryptoKey, expected ...codersdk.CryptoKey) { t.Helper() diff --git a/enterprise/dbcrypt/dbcrypt_internal_test.go b/enterprise/dbcrypt/dbcrypt_internal_test.go index a480fa08930f5..8800180493d12 100644 --- a/enterprise/dbcrypt/dbcrypt_internal_test.go +++ b/enterprise/dbcrypt/dbcrypt_internal_test.go @@ -397,12 +397,12 @@ func TestCryptoKeys(t *testing.T) { _ = dbgen.CryptoKey(t, crypt, database.CryptoKey{ Secret: sql.NullString{String: "test", Valid: true}, }) - key, err := crypt.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + key, err := crypt.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) require.Equal(t, "test", key.Secret.String) require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) - key, err = db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceApps) + key, err = db.GetLatestCryptoKeyByFeature(ctx, database.CryptoKeyFeatureWorkspaceAppsAPIKey) require.NoError(t, err) requireEncryptedEquals(t, ciphers[0], key.Secret.String, "test") require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) @@ -415,7 +415,7 @@ func TestCryptoKeys(t *testing.T) { Secret: sql.NullString{String: "test", Valid: true}, }) key, err := crypt.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: key.Sequence, }) require.NoError(t, err) @@ -423,7 +423,7 @@ func TestCryptoKeys(t *testing.T) { require.Equal(t, ciphers[0].HexDigest(), key.SecretKeyID.String) key, err = db.GetCryptoKeyByFeatureAndSequence(ctx, database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: key.Sequence, }) require.NoError(t, err) @@ -459,7 +459,7 @@ func TestCryptoKeys(t *testing.T) { Secret: sql.NullString{String: "test", Valid: true}, }) _ = dbgen.CryptoKey(t, crypt, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceApps, + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 43, }) keys, err := crypt.GetCryptoKeysByFeature(ctx, database.CryptoKeyFeatureTailnetResume) diff --git a/enterprise/workspaceapps_test.go b/enterprise/workspaceapps_test.go index f4ba577f13e33..51d0314c45767 100644 --- a/enterprise/workspaceapps_test.go +++ b/enterprise/workspaceapps_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/workspaceapps/apptest" "github.com/coder/coder/v2/codersdk" @@ -36,6 +37,9 @@ func TestWorkspaceApps(t *testing.T) { flushStatsCollectorCh <- flushStatsCollectorDone <-flushStatsCollectorDone } + + db, pubsub := dbtestutil.NewDB(t) + client, _, _, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -51,6 +55,8 @@ func TestWorkspaceApps(t *testing.T) { }, }, 
WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ diff --git a/enterprise/wsproxy/keyfetcher.go b/enterprise/wsproxy/keyfetcher.go index f30fffb2cd093..1a1745d6ccd2d 100644 --- a/enterprise/wsproxy/keyfetcher.go +++ b/enterprise/wsproxy/keyfetcher.go @@ -13,12 +13,11 @@ import ( var _ cryptokeys.Fetcher = &ProxyFetcher{} type ProxyFetcher struct { - Client *wsproxysdk.Client - Feature codersdk.CryptoKeyFeature + Client *wsproxysdk.Client } -func (p *ProxyFetcher) Fetch(ctx context.Context) ([]codersdk.CryptoKey, error) { - keys, err := p.Client.CryptoKeys(ctx) +func (p *ProxyFetcher) Fetch(ctx context.Context, feature codersdk.CryptoKeyFeature) ([]codersdk.CryptoKey, error) { + keys, err := p.Client.CryptoKeys(ctx, feature) if err != nil { return nil, xerrors.Errorf("crypto keys: %w", err) } diff --git a/enterprise/wsproxy/tokenprovider.go b/enterprise/wsproxy/tokenprovider.go index 38822a4e7a22d..5093c6015725e 100644 --- a/enterprise/wsproxy/tokenprovider.go +++ b/enterprise/wsproxy/tokenprovider.go @@ -7,6 +7,8 @@ import ( "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/cryptokeys" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" ) @@ -18,18 +20,19 @@ type TokenProvider struct { AccessURL *url.URL AppHostname string - Client *wsproxysdk.Client - SecurityKey workspaceapps.SecurityKey - Logger slog.Logger + Client *wsproxysdk.Client + TokenSigningKeycache cryptokeys.SigningKeycache + APIKeyEncryptionKeycache cryptokeys.EncryptionKeycache + Logger slog.Logger } func (p *TokenProvider) FromRequest(r *http.Request) (*workspaceapps.SignedToken, bool) { - return workspaceapps.FromRequest(r, p.SecurityKey) + return workspaceapps.FromRequest(r, p.TokenSigningKeycache) } func (p *TokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *http.Request, issueReq workspaceapps.IssueTokenRequest) (*workspaceapps.SignedToken, string, bool) { appReq := issueReq.AppRequest.Normalize() - err := appReq.Validate() + err := appReq.Check() if err != nil { workspaceapps.WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "invalid app request") return nil, "", false @@ -42,7 +45,8 @@ func (p *TokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r *ht } // Check that it verifies properly and matches the string. - token, err := p.SecurityKey.VerifySignedToken(resp.SignedTokenStr) + var token workspaceapps.SignedToken + err = jwtutils.Verify(ctx, p.TokenSigningKeycache, resp.SignedTokenStr, &token) if err != nil { workspaceapps.WriteWorkspaceApp500(p.Logger, p.DashboardURL, rw, r, &appReq, err, "failed to verify newly generated signed token") return nil, "", false diff --git a/enterprise/wsproxy/wsproxy.go b/enterprise/wsproxy/wsproxy.go index 2a7e9e81e0cda..fe900fa433530 100644 --- a/enterprise/wsproxy/wsproxy.go +++ b/enterprise/wsproxy/wsproxy.go @@ -31,6 +31,7 @@ import ( "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/cli/cliutil" "github.com/coder/coder/v2/coderd" + "github.com/coder/coder/v2/coderd/cryptokeys" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/tracing" @@ -130,6 +131,13 @@ type Server struct { // the moon's token. 
SDKClient *wsproxysdk.Client + // apiKeyEncryptionKeycache manages the encryption keys for smuggling API + // tokens to the alternate domain when using workspace apps. + apiKeyEncryptionKeycache cryptokeys.EncryptionKeycache + // appTokenSigningKeycache manages the signing keys for signing the app + // tokens we use for workspace apps. + appTokenSigningKeycache cryptokeys.SigningKeycache + // DERP derpMesh *derpmesh.Mesh derpMeshTLSConfig *tls.Config @@ -195,19 +203,42 @@ func New(ctx context.Context, opts *Options) (*Server, error) { derpServer := derp.NewServer(key.NewNode(), tailnet.Logger(opts.Logger.Named("net.derp"))) ctx, cancel := context.WithCancel(context.Background()) + + encryptionCache, err := cryptokeys.NewEncryptionCache(ctx, + opts.Logger, + &ProxyFetcher{Client: client}, + codersdk.CryptoKeyFeatureWorkspaceAppsAPIKey, + ) + if err != nil { + cancel() + return nil, xerrors.Errorf("create api key encryption cache: %w", err) + } + signingCache, err := cryptokeys.NewSigningCache(ctx, + opts.Logger, + &ProxyFetcher{Client: client}, + codersdk.CryptoKeyFeatureWorkspaceAppsToken, + ) + if err != nil { + cancel() + return nil, xerrors.Errorf("create api token signing cache: %w", err) + } + r := chi.NewRouter() s := &Server{ - Options: opts, - Handler: r, - DashboardURL: opts.DashboardURL, - Logger: opts.Logger.Named("net.workspace-proxy"), - TracerProvider: opts.Tracing, - PrometheusRegistry: opts.PrometheusRegistry, - SDKClient: client, - derpMesh: derpmesh.New(opts.Logger.Named("net.derpmesh"), derpServer, meshTLSConfig), - derpMeshTLSConfig: meshTLSConfig, - ctx: ctx, - cancel: cancel, + ctx: ctx, + cancel: cancel, + + Options: opts, + Handler: r, + DashboardURL: opts.DashboardURL, + Logger: opts.Logger.Named("net.workspace-proxy"), + TracerProvider: opts.Tracing, + PrometheusRegistry: opts.PrometheusRegistry, + SDKClient: client, + derpMesh: derpmesh.New(opts.Logger.Named("net.derpmesh"), derpServer, meshTLSConfig), + derpMeshTLSConfig: meshTLSConfig, + apiKeyEncryptionKeycache: encryptionCache, + appTokenSigningKeycache: signingCache, } // Register the workspace proxy with the primary coderd instance and start a @@ -240,11 +271,6 @@ func New(ctx context.Context, opts *Options) (*Server, error) { return nil, xerrors.Errorf("handle register: %w", err) } - secKey, err := workspaceapps.KeyFromString(regResp.AppSecurityKey) - if err != nil { - return nil, xerrors.Errorf("parse app security key: %w", err) - } - agentProvider, err := coderd.NewServerTailnet(ctx, s.Logger, nil, @@ -277,20 +303,21 @@ func New(ctx context.Context, opts *Options) (*Server, error) { HostnameRegex: opts.AppHostnameRegex, RealIPConfig: opts.RealIPConfig, SignedTokenProvider: &TokenProvider{ - DashboardURL: opts.DashboardURL, - AccessURL: opts.AccessURL, - AppHostname: opts.AppHostname, - Client: client, - SecurityKey: secKey, - Logger: s.Logger.Named("proxy_token_provider"), + DashboardURL: opts.DashboardURL, + AccessURL: opts.AccessURL, + AppHostname: opts.AppHostname, + Client: client, + TokenSigningKeycache: signingCache, + APIKeyEncryptionKeycache: encryptionCache, + Logger: s.Logger.Named("proxy_token_provider"), }, - AppSecurityKey: secKey, DisablePathApps: opts.DisablePathApps, SecureAuthCookie: opts.SecureAuthCookie, - AgentProvider: agentProvider, - StatsCollector: workspaceapps.NewStatsCollector(opts.StatsCollectorOptions), + AgentProvider: agentProvider, + StatsCollector: workspaceapps.NewStatsCollector(opts.StatsCollectorOptions), + APIKeyEncryptionKeycache: encryptionCache, } derpHandler := 
derphttp.Handler(derpServer) @@ -435,6 +462,8 @@ func (s *Server) Close() error { err = multierror.Append(err, agentProviderErr) } s.SDKClient.SDKClient.HTTPClient.CloseIdleConnections() + _ = s.appTokenSigningKeycache.Close() + _ = s.apiKeyEncryptionKeycache.Close() return err } diff --git a/enterprise/wsproxy/wsproxy_test.go b/enterprise/wsproxy/wsproxy_test.go index 3d3926c5afae7..4add46af9bc0a 100644 --- a/enterprise/wsproxy/wsproxy_test.go +++ b/enterprise/wsproxy/wsproxy_test.go @@ -25,6 +25,9 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/healthcheck/derphealth" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/workspaceapps/apptest" @@ -932,6 +935,9 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { if opts.PrimaryAppHost == "" { opts.PrimaryAppHost = "*.primary.test.coder.com" } + + db, pubsub := dbtestutil.NewDB(t) + client, closer, api, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -947,6 +953,8 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { }, }, WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ @@ -959,6 +967,13 @@ func TestWorkspaceProxyWorkspaceApps(t *testing.T) { _ = closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, + }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, + }) + // Create the external proxy if opts.DisableSubdomainApps { opts.AppHost = "" @@ -1002,6 +1017,8 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { if opts.PrimaryAppHost == "" { opts.PrimaryAppHost = "*.primary.test.coder.com" } + + db, pubsub := dbtestutil.NewDB(t) client, closer, api, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: deploymentValues, @@ -1017,6 +1034,8 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { }, }, WorkspaceAppsStatsCollectorOptions: opts.StatsCollectorOptions, + Database: db, + Pubsub: pubsub, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ @@ -1029,6 +1048,13 @@ func TestWorkspaceProxyWorkspaceApps_BlockDirect(t *testing.T) { _ = closer.Close() }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsToken, + }) + _ = dbgen.CryptoKey(t, db, database.CryptoKey{ + Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, + }) + // Create the external proxy if opts.DisableSubdomainApps { opts.AppHost = "" diff --git a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go index 77d36561c6de8..a8f22c2b93063 100644 --- a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go +++ b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go @@ -205,7 +205,6 @@ type RegisterWorkspaceProxyRequest struct { } type RegisterWorkspaceProxyResponse struct { - AppSecurityKey string `json:"app_security_key"` DERPMeshKey string `json:"derp_mesh_key"` DERPRegionID int32 `json:"derp_region_id"` DERPMap *tailcfg.DERPMap `json:"derp_map"` @@ -372,12 +371,6 @@ func (l 
*RegisterWorkspaceProxyLoop) Start(ctx context.Context) (RegisterWorkspa } failedAttempts = 0 - // Check for consistency. - if originalRes.AppSecurityKey != resp.AppSecurityKey { - l.failureFn(xerrors.New("app security key has changed, proxy must be restarted")) - return - } - if originalRes.DERPMeshKey != resp.DERPMeshKey { l.failureFn(xerrors.New("DERP mesh key has changed, proxy must be restarted")) return @@ -586,10 +579,10 @@ type CryptoKeysResponse struct { CryptoKeys []codersdk.CryptoKey `json:"crypto_keys"` } -func (c *Client) CryptoKeys(ctx context.Context) (CryptoKeysResponse, error) { +func (c *Client) CryptoKeys(ctx context.Context, feature codersdk.CryptoKeyFeature) (CryptoKeysResponse, error) { res, err := c.Request(ctx, http.MethodGet, - "/api/v2/workspaceproxies/me/crypto-keys", - nil, + "/api/v2/workspaceproxies/me/crypto-keys", nil, + codersdk.WithQueryParam("feature", string(feature)), ) if err != nil { return CryptoKeysResponse{}, xerrors.Errorf("make request: %w", err) diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index e55167ef03f88..d687fb68ec61f 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -2110,8 +2110,8 @@ export type BuildReason = "autostart" | "autostop" | "initiator" export const BuildReasons: BuildReason[] = ["autostart", "autostop", "initiator"] // From codersdk/deployment.go -export type CryptoKeyFeature = "oidc_convert" | "tailnet_resume" | "workspace_apps" -export const CryptoKeyFeatures: CryptoKeyFeature[] = ["oidc_convert", "tailnet_resume", "workspace_apps"] +export type CryptoKeyFeature = "oidc_convert" | "tailnet_resume" | "workspace_apps_api_key" | "workspace_apps_token" +export const CryptoKeyFeatures: CryptoKeyFeature[] = ["oidc_convert", "tailnet_resume", "workspace_apps_api_key", "workspace_apps_token"] // From codersdk/workspaceagents.go export type DisplayApp = "port_forwarding_helper" | "ssh_helper" | "vscode" | "vscode_insiders" | "web_terminal" diff --git a/tailnet/resume.go b/tailnet/resume.go index b9443064a37f9..2975fa35f1674 100644 --- a/tailnet/resume.go +++ b/tailnet/resume.go @@ -3,32 +3,23 @@ package tailnet import ( "context" "crypto/rand" - "database/sql" - "encoding/hex" - "encoding/json" "time" - "github.com/go-jose/go-jose/v3" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" "golang.org/x/xerrors" "google.golang.org/protobuf/types/known/durationpb" "google.golang.org/protobuf/types/known/timestamppb" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/tailnet/proto" "github.com/coder/quartz" ) const ( DefaultResumeTokenExpiry = 24 * time.Hour - - resumeTokenSigningAlgorithm = jose.HS512 ) -// resumeTokenSigningKeyID is a fixed key ID for the resume token signing key. -// If/when we add support for multiple keys (e.g. key rotation), this will move -// to the database instead. -var resumeTokenSigningKeyID = uuid.MustParse("97166747-9309-4d7f-9071-a230e257c2a4") - // NewInsecureTestResumeTokenProvider returns a ResumeTokenProvider that uses a // random key with short expiry for testing purposes. If any errors occur while // generating the key, the function panics. 
@@ -37,12 +28,15 @@ func NewInsecureTestResumeTokenProvider() ResumeTokenProvider { if err != nil { panic(err) } - return NewResumeTokenKeyProvider(key, quartz.NewReal(), time.Hour) + return NewResumeTokenKeyProvider(jwtutils.StaticKey{ + ID: uuid.New().String(), + Key: key[:], + }, quartz.NewReal(), time.Hour) } type ResumeTokenProvider interface { - GenerateResumeToken(peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) - VerifyResumeToken(token string) (uuid.UUID, error) + GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) + VerifyResumeToken(ctx context.Context, token string) (uuid.UUID, error) } type ResumeTokenSigningKey [64]byte @@ -56,104 +50,37 @@ func GenerateResumeTokenSigningKey() (ResumeTokenSigningKey, error) { return key, nil } -type ResumeTokenSigningKeyDatabaseStore interface { - GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) - UpsertCoordinatorResumeTokenSigningKey(ctx context.Context, key string) error -} - -// ResumeTokenSigningKeyFromDatabase retrieves the coordinator resume token -// signing key from the database. If the key is not found, a new key is -// generated and inserted into the database. -func ResumeTokenSigningKeyFromDatabase(ctx context.Context, db ResumeTokenSigningKeyDatabaseStore) (ResumeTokenSigningKey, error) { - var resumeTokenKey ResumeTokenSigningKey - resumeTokenKeyStr, err := db.GetCoordinatorResumeTokenSigningKey(ctx) - if err != nil && !xerrors.Is(err, sql.ErrNoRows) { - return resumeTokenKey, xerrors.Errorf("get coordinator resume token key: %w", err) - } - if decoded, err := hex.DecodeString(resumeTokenKeyStr); err != nil || len(decoded) != len(resumeTokenKey) { - newKey, err := GenerateResumeTokenSigningKey() - if err != nil { - return resumeTokenKey, xerrors.Errorf("generate fresh coordinator resume token key: %w", err) - } - - resumeTokenKeyStr = hex.EncodeToString(newKey[:]) - err = db.UpsertCoordinatorResumeTokenSigningKey(ctx, resumeTokenKeyStr) - if err != nil { - return resumeTokenKey, xerrors.Errorf("insert freshly generated coordinator resume token key to database: %w", err) - } - } - - resumeTokenKeyBytes, err := hex.DecodeString(resumeTokenKeyStr) - if err != nil { - return resumeTokenKey, xerrors.Errorf("decode coordinator resume token key from database: %w", err) - } - if len(resumeTokenKeyBytes) != len(resumeTokenKey) { - return resumeTokenKey, xerrors.Errorf("coordinator resume token key in database is not the correct length, expect %d got %d", len(resumeTokenKey), len(resumeTokenKeyBytes)) - } - copy(resumeTokenKey[:], resumeTokenKeyBytes) - if resumeTokenKey == [64]byte{} { - return resumeTokenKey, xerrors.Errorf("coordinator resume token key in database is empty") - } - return resumeTokenKey, nil -} - type ResumeTokenKeyProvider struct { - key ResumeTokenSigningKey + key jwtutils.SigningKeyManager clock quartz.Clock expiry time.Duration } -func NewResumeTokenKeyProvider(key ResumeTokenSigningKey, clock quartz.Clock, expiry time.Duration) ResumeTokenProvider { +func NewResumeTokenKeyProvider(key jwtutils.SigningKeyManager, clock quartz.Clock, expiry time.Duration) ResumeTokenProvider { if expiry <= 0 { expiry = DefaultResumeTokenExpiry } return ResumeTokenKeyProvider{ key: key, clock: clock, - expiry: DefaultResumeTokenExpiry, + expiry: expiry, } } -type resumeTokenPayload struct { - PeerID uuid.UUID `json:"sub"` - Expiry int64 `json:"exp"` -} - -func (p ResumeTokenKeyProvider) GenerateResumeToken(peerID uuid.UUID) 
(*proto.RefreshResumeTokenResponse, error) { +func (p ResumeTokenKeyProvider) GenerateResumeToken(ctx context.Context, peerID uuid.UUID) (*proto.RefreshResumeTokenResponse, error) { exp := p.clock.Now().Add(p.expiry) - payload := resumeTokenPayload{ - PeerID: peerID, - Expiry: exp.Unix(), - } - payloadBytes, err := json.Marshal(payload) - if err != nil { - return nil, xerrors.Errorf("marshal payload to JSON: %w", err) - } - - signer, err := jose.NewSigner(jose.SigningKey{ - Algorithm: resumeTokenSigningAlgorithm, - Key: p.key[:], - }, &jose.SignerOptions{ - ExtraHeaders: map[jose.HeaderKey]interface{}{ - "kid": resumeTokenSigningKeyID.String(), - }, - }) - if err != nil { - return nil, xerrors.Errorf("create signer: %w", err) + payload := jwtutils.RegisteredClaims{ + Subject: peerID.String(), + Expiry: jwt.NewNumericDate(exp), } - signedObject, err := signer.Sign(payloadBytes) + token, err := jwtutils.Sign(ctx, p.key, payload) if err != nil { return nil, xerrors.Errorf("sign payload: %w", err) } - serialized, err := signedObject.CompactSerialize() - if err != nil { - return nil, xerrors.Errorf("serialize JWS: %w", err) - } - return &proto.RefreshResumeTokenResponse{ - Token: serialized, + Token: token, RefreshIn: durationpb.New(p.expiry / 2), ExpiresAt: timestamppb.New(exp), }, nil @@ -162,35 +89,17 @@ func (p ResumeTokenKeyProvider) GenerateResumeToken(peerID uuid.UUID) (*proto.Re // VerifyResumeToken parses a signed tailnet resume token with the given key and // returns the payload. If the token is invalid or expired, an error is // returned. -func (p ResumeTokenKeyProvider) VerifyResumeToken(str string) (uuid.UUID, error) { - object, err := jose.ParseSigned(str) - if err != nil { - return uuid.Nil, xerrors.Errorf("parse JWS: %w", err) - } - if len(object.Signatures) != 1 { - return uuid.Nil, xerrors.New("expected 1 signature") - } - if object.Signatures[0].Header.Algorithm != string(resumeTokenSigningAlgorithm) { - return uuid.Nil, xerrors.Errorf("expected token signing algorithm to be %q, got %q", resumeTokenSigningAlgorithm, object.Signatures[0].Header.Algorithm) - } - if object.Signatures[0].Header.KeyID != resumeTokenSigningKeyID.String() { - return uuid.Nil, xerrors.Errorf("expected token key ID to be %q, got %q", resumeTokenSigningKeyID, object.Signatures[0].Header.KeyID) - } - - output, err := object.Verify(p.key[:]) +func (p ResumeTokenKeyProvider) VerifyResumeToken(ctx context.Context, str string) (uuid.UUID, error) { + var tok jwt.Claims + err := jwtutils.Verify(ctx, p.key, str, &tok, jwtutils.WithVerifyExpected(jwt.Expected{ + Time: p.clock.Now(), + })) if err != nil { - return uuid.Nil, xerrors.Errorf("verify JWS: %w", err) + return uuid.Nil, xerrors.Errorf("verify payload: %w", err) } - - var tok resumeTokenPayload - err = json.Unmarshal(output, &tok) + parsed, err := uuid.Parse(tok.Subject) if err != nil { - return uuid.Nil, xerrors.Errorf("unmarshal payload: %w", err) + return uuid.Nil, xerrors.Errorf("parse peerID from token: %w", err) } - exp := time.Unix(tok.Expiry, 0) - if exp.Before(p.clock.Now()) { - return uuid.Nil, xerrors.New("signed resume token expired") - } - - return tok.PeerID, nil + return parsed, nil } diff --git a/tailnet/resume_test.go b/tailnet/resume_test.go index 3f63887cbfef3..6f32fba4c511e 100644 --- a/tailnet/resume_test.go +++ b/tailnet/resume_test.go @@ -1,117 +1,20 @@ package tailnet_test import ( - "context" - "encoding/hex" "testing" "time" + "github.com/go-jose/go-jose/v4" + "github.com/go-jose/go-jose/v4/jwt" "github.com/google/uuid" - 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" - "github.com/coder/coder/v2/coderd/database/dbmock" - "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jwtutils" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/testutil" "github.com/coder/quartz" ) -func TestResumeTokenSigningKeyFromDatabase(t *testing.T) { - t.Parallel() - - assertRandomKey := func(t *testing.T, key tailnet.ResumeTokenSigningKey) { - t.Helper() - assert.NotEqual(t, tailnet.ResumeTokenSigningKey{}, key, "key should not be empty") - assert.NotEqualValues(t, [64]byte{1}, key, "key should not be all 1s") - } - - t.Run("GenerateRetrieve", func(t *testing.T) { - t.Parallel() - - db, _ := dbtestutil.NewDB(t) - ctx := testutil.Context(t, testutil.WaitShort) - key1, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key1) - - key2, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - require.Equal(t, key1, key2, "keys should not be different") - }) - - t.Run("GetError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("", assert.AnError) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorIs(t, err, assert.AnError) - }) - - t.Run("UpsertError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("", nil) - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Return(assert.AnError) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorIs(t, err, assert.AnError) - }) - - t.Run("DecodeErrorShouldRegenerate", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("invalid", nil) - - var storedKey tailnet.ResumeTokenSigningKey - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Do(func(_ context.Context, value string) error { - keyBytes, err := hex.DecodeString(value) - require.NoError(t, err) - require.Len(t, keyBytes, len(storedKey)) - copy(storedKey[:], keyBytes) - return nil - }) - - ctx := testutil.Context(t, testutil.WaitShort) - key, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key) - require.Equal(t, storedKey, key, "key should match stored value") - }) - - t.Run("LengthErrorShouldRegenerate", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return("deadbeef", nil) - db.EXPECT().UpsertCoordinatorResumeTokenSigningKey(gomock.Any(), gomock.Any()).Return(nil) - - ctx := testutil.Context(t, testutil.WaitShort) - key, err := tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.NoError(t, err) - assertRandomKey(t, key) - }) - - t.Run("EmptyError", func(t *testing.T) { - t.Parallel() - - db := dbmock.NewMockStore(gomock.NewController(t)) - emptyKey := hex.EncodeToString(make([]byte, 64)) - db.EXPECT().GetCoordinatorResumeTokenSigningKey(gomock.Any()).Return(emptyKey, nil) - - ctx := testutil.Context(t, testutil.WaitShort) - _, err := 
tailnet.ResumeTokenSigningKeyFromDatabase(ctx, db) - require.ErrorContains(t, err, "is empty") - }) -} - func TestResumeTokenKeyProvider(t *testing.T) { t.Parallel() @@ -121,17 +24,18 @@ func TestResumeTokenKeyProvider(t *testing.T) { t.Run("OK", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) id := uuid.New() clock := quartz.NewMock(t) - provider := tailnet.NewResumeTokenKeyProvider(key, clock, tailnet.DefaultResumeTokenExpiry) - token, err := provider.GenerateResumeToken(id) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), clock, tailnet.DefaultResumeTokenExpiry) + token, err := provider.GenerateResumeToken(ctx, id) require.NoError(t, err) require.NotNil(t, token) require.NotEmpty(t, token.Token) require.Equal(t, tailnet.DefaultResumeTokenExpiry/2, token.RefreshIn.AsDuration()) require.WithinDuration(t, clock.Now().Add(tailnet.DefaultResumeTokenExpiry), token.ExpiresAt.AsTime(), time.Second) - gotID, err := provider.VerifyResumeToken(token.Token) + gotID, err := provider.VerifyResumeToken(ctx, token.Token) require.NoError(t, err) require.Equal(t, id, gotID) }) @@ -139,43 +43,57 @@ func TestResumeTokenKeyProvider(t *testing.T) { t.Run("Expired", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) id := uuid.New() clock := quartz.NewMock(t) - provider := tailnet.NewResumeTokenKeyProvider(key, clock, tailnet.DefaultResumeTokenExpiry) - token, err := provider.GenerateResumeToken(id) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), clock, tailnet.DefaultResumeTokenExpiry) + token, err := provider.GenerateResumeToken(ctx, id) require.NoError(t, err) require.NotNil(t, token) require.NotEmpty(t, token.Token) require.Equal(t, tailnet.DefaultResumeTokenExpiry/2, token.RefreshIn.AsDuration()) require.WithinDuration(t, clock.Now().Add(tailnet.DefaultResumeTokenExpiry), token.ExpiresAt.AsTime(), time.Second) - // Advance time past expiry - _ = clock.Advance(tailnet.DefaultResumeTokenExpiry + time.Second) + // Advance time past expiry. Account for leeway. 
+ _ = clock.Advance(tailnet.DefaultResumeTokenExpiry + time.Second*61) - _, err = provider.VerifyResumeToken(token.Token) - require.ErrorContains(t, err, "expired") + _, err = provider.VerifyResumeToken(ctx, token.Token) + require.Error(t, err) + require.ErrorIs(t, err, jwt.ErrExpired) }) t.Run("InvalidToken", func(t *testing.T) { t.Parallel() - provider := tailnet.NewResumeTokenKeyProvider(key, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - _, err := provider.VerifyResumeToken("invalid") + ctx := testutil.Context(t, testutil.WaitShort) + provider := tailnet.NewResumeTokenKeyProvider(newKeySigner(key), quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + _, err := provider.VerifyResumeToken(ctx, "invalid") require.ErrorContains(t, err, "parse JWS") }) t.Run("VerifyError", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) // Generate a resume token with a different key otherKey, err := tailnet.GenerateResumeTokenSigningKey() require.NoError(t, err) - otherProvider := tailnet.NewResumeTokenKeyProvider(otherKey, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - token, err := otherProvider.GenerateResumeToken(uuid.New()) + otherSigner := newKeySigner(otherKey) + otherProvider := tailnet.NewResumeTokenKeyProvider(otherSigner, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + token, err := otherProvider.GenerateResumeToken(ctx, uuid.New()) require.NoError(t, err) - provider := tailnet.NewResumeTokenKeyProvider(key, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) - _, err = provider.VerifyResumeToken(token.Token) - require.ErrorContains(t, err, "verify JWS") + signer := newKeySigner(key) + signer.ID = otherSigner.ID + provider := tailnet.NewResumeTokenKeyProvider(signer, quartz.NewMock(t), tailnet.DefaultResumeTokenExpiry) + _, err = provider.VerifyResumeToken(ctx, token.Token) + require.ErrorIs(t, err, jose.ErrCryptoFailure) }) } + +func newKeySigner(key tailnet.ResumeTokenSigningKey) jwtutils.StaticKey { + return jwtutils.StaticKey{ + ID: "123", + Key: key[:], + } +} diff --git a/tailnet/service.go b/tailnet/service.go index 28a054dd8d671..7f38f63a589b3 100644 --- a/tailnet/service.go +++ b/tailnet/service.go @@ -177,7 +177,7 @@ func (s *DRPCService) RefreshResumeToken(ctx context.Context, _ *proto.RefreshRe return nil, xerrors.New("no Stream ID") } - res, err := s.ResumeTokenProvider.GenerateResumeToken(streamID.ID) + res, err := s.ResumeTokenProvider.GenerateResumeToken(ctx, streamID.ID) if err != nil { return nil, xerrors.Errorf("generate resume token: %w", err) } From 0dd942e19728af9551406f04fa76ad6a807246e1 Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Fri, 25 Oct 2024 12:49:44 -0400 Subject: [PATCH 03/42] fix: stop incrementing activity on empty agent stats (#15204) --- coderd/agentapi/stats_test.go | 60 ++++++++--- coderd/insights_test.go | 6 +- coderd/workspaceagentsrpc_test.go | 13 ++- coderd/workspacestats/batcher.go | 5 +- .../workspacestats/batcher_internal_test.go | 6 +- coderd/workspacestats/reporter.go | 99 ++++++++----------- coderd/workspacestats/tracker.go | 1 - .../workspacestatstest/batcher.go | 3 +- 8 files changed, 107 insertions(+), 86 deletions(-) diff --git a/coderd/agentapi/stats_test.go b/coderd/agentapi/stats_test.go index d2c8e4f163df5..83edb8cccc4e1 100644 --- a/coderd/agentapi/stats_test.go +++ b/coderd/agentapi/stats_test.go @@ -70,6 +70,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + 
flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -109,6 +114,7 @@ func TestUpdateStates(t *testing.T) { Database: dbM, Pubsub: ps, StatsBatcher: batcher, + UsageTracker: wut, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { updateAgentMetricsFnCalled = true @@ -126,10 +132,14 @@ func TestUpdateStates(t *testing.T) { return now }, } + defer wut.Close() // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) + // User gets fetched to hit the UpdateAgentMetricsFn. + dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) + // We expect an activity bump because ConnectionCount > 0. dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ WorkspaceID: workspace.ID, @@ -137,14 +147,11 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. - dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, LastUsedAt: now, }).Return(nil) - // User gets fetched to hit the UpdateAgentMetricsFn. - dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) - // Ensure that pubsub notifications are sent. notifyDescription := make(chan []byte) ps.Subscribe(codersdk.WorkspaceNotifyChannel(workspace.ID), func(_ context.Context, description []byte) { @@ -159,6 +166,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(10 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + batcher.Mu.Lock() defer batcher.Mu.Unlock() require.Equal(t, int64(1), batcher.Called) @@ -211,6 +222,7 @@ func TestUpdateStates(t *testing.T) { StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, + UsageTracker: workspacestats.NewTracker(dbM), StatsBatcher: batcher, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), // Ignored when nil. @@ -225,12 +237,6 @@ func TestUpdateStates(t *testing.T) { // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // Workspace last used at gets bumped. 
- dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }).Return(nil) - _, err := api.UpdateStats(context.Background(), req) require.NoError(t, err) }) @@ -306,6 +312,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -325,6 +336,7 @@ func TestUpdateStates(t *testing.T) { StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, + UsageTracker: wut, StatsBatcher: batcher, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { @@ -343,6 +355,7 @@ func TestUpdateStates(t *testing.T) { return now }, } + defer wut.Close() // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) @@ -355,9 +368,9 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. - dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, + LastUsedAt: now.UTC(), }).Return(nil) // User gets fetched to hit the UpdateAgentMetricsFn. @@ -369,6 +382,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(15 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + require.True(t, updateAgentMetricsFnCalled) }) @@ -392,6 +409,11 @@ func TestUpdateStates(t *testing.T) { } batcher = &workspacestatstest.StatsBatcher{} updateAgentMetricsFnCalled = false + tickCh = make(chan time.Time) + flushCh = make(chan int, 1) + wut = workspacestats.NewTracker(dbM, + workspacestats.TrackerWithTickFlush(tickCh, flushCh), + ) req = &agentproto.UpdateStatsRequest{ Stats: &agentproto.Stats{ @@ -422,6 +444,7 @@ func TestUpdateStates(t *testing.T) { }, } ) + defer wut.Close() api := agentapi.StatsAPI{ AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil @@ -431,6 +454,7 @@ func TestUpdateStates(t *testing.T) { Database: dbM, Pubsub: ps, StatsBatcher: batcher, + UsageTracker: wut, TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { updateAgentMetricsFnCalled = true @@ -462,8 +486,8 @@ func TestUpdateStates(t *testing.T) { }).Return(nil) // Workspace last used at gets bumped. 
- dbM.EXPECT().UpdateWorkspaceLastUsedAt(gomock.Any(), database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, + dbM.EXPECT().BatchUpdateWorkspaceLastUsedAt(gomock.Any(), database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: []uuid.UUID{workspace.ID}, LastUsedAt: now, }).Return(nil) @@ -484,6 +508,10 @@ func TestUpdateStates(t *testing.T) { ReportInterval: durationpb.New(10 * time.Second), }, resp) + tickCh <- now + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + batcher.Mu.Lock() defer batcher.Mu.Unlock() require.EqualValues(t, 1, batcher.Called) diff --git a/coderd/insights_test.go b/coderd/insights_test.go index 06fe8d46ca5ac..bf8aa4bc44506 100644 --- a/coderd/insights_test.go +++ b/coderd/insights_test.go @@ -700,14 +700,13 @@ func TestTemplateInsights_Golden(t *testing.T) { connectionCount = 0 } for createdAt.Before(stat.endedAt) { - err = batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ + batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ ConnectionCount: connectionCount, SessionCountVscode: stat.sessionCountVSCode, SessionCountJetbrains: stat.sessionCountJetBrains, SessionCountReconnectingPty: stat.sessionCountReconnectingPTY, SessionCountSsh: stat.sessionCountSSH, }, false) - require.NoError(t, err, "want no error inserting agent stats") createdAt = createdAt.Add(30 * time.Second) } } @@ -1599,14 +1598,13 @@ func TestUserActivityInsights_Golden(t *testing.T) { connectionCount = 0 } for createdAt.Before(stat.endedAt) { - err = batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ + batcher.Add(createdAt, workspace.agentID, workspace.template.id, workspace.user.(*testUser).sdk.ID, workspace.id, &agentproto.Stats{ ConnectionCount: connectionCount, SessionCountVscode: stat.sessionCountVSCode, SessionCountJetbrains: stat.sessionCountJetBrains, SessionCountReconnectingPty: stat.sessionCountReconnectingPTY, SessionCountSsh: stat.sessionCountSSH, }, false) - require.NoError(t, err, "want no error inserting agent stats") createdAt = createdAt.Add(30 * time.Second) } } diff --git a/coderd/workspaceagentsrpc_test.go b/coderd/workspaceagentsrpc_test.go index 817aa11c4c292..3f1f1a2b8a764 100644 --- a/coderd/workspaceagentsrpc_test.go +++ b/coderd/workspaceagentsrpc_test.go @@ -3,6 +3,7 @@ package coderd_test import ( "context" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -11,6 +12,7 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" @@ -20,7 +22,12 @@ import ( func TestWorkspaceAgentReportStats(t *testing.T) { t.Parallel() - client, db := coderdtest.NewWithDatabase(t, nil) + tickCh := make(chan time.Time) + flushCh := make(chan int, 1) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + WorkspaceUsageTrackerFlush: flushCh, + WorkspaceUsageTrackerTick: tickCh, + }) user := coderdtest.CreateFirstUser(t, client) r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ OrganizationID: user.OrganizationID, @@ -53,6 +60,10 @@ func TestWorkspaceAgentReportStats(t 
*testing.T) { }) require.NoError(t, err) + tickCh <- dbtime.Now() + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") + newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) require.NoError(t, err) diff --git a/coderd/workspacestats/batcher.go b/coderd/workspacestats/batcher.go index 1f14c5cec5a17..46efc69170562 100644 --- a/coderd/workspacestats/batcher.go +++ b/coderd/workspacestats/batcher.go @@ -25,7 +25,7 @@ const ( ) type Batcher interface { - Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) error + Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) } // DBBatcher holds a buffer of agent stats and periodically flushes them to @@ -139,7 +139,7 @@ func (b *DBBatcher) Add( workspaceID uuid.UUID, st *agentproto.Stats, usage bool, -) error { +) { b.mu.Lock() defer b.mu.Unlock() @@ -176,7 +176,6 @@ func (b *DBBatcher) Add( b.flushLever <- struct{}{} b.flushForced.Store(true) } - return nil } // Run runs the batcher. diff --git a/coderd/workspacestats/batcher_internal_test.go b/coderd/workspacestats/batcher_internal_test.go index 3e106f07e4e2f..874acd7667dce 100644 --- a/coderd/workspacestats/batcher_internal_test.go +++ b/coderd/workspacestats/batcher_internal_test.go @@ -63,7 +63,7 @@ func TestBatchStats(t *testing.T) { // Given: a single data point is added for workspace t2 := t1.Add(time.Second) t.Logf("inserting 1 stat") - require.NoError(t, b.Add(t2.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false)) + b.Add(t2.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false) // When: it becomes time to report stats // Signal a tick and wait for a flush to complete. 
@@ -87,9 +87,9 @@ func TestBatchStats(t *testing.T) { t.Logf("inserting %d stats", defaultBufferSize) for i := 0; i < defaultBufferSize; i++ { if i%2 == 0 { - require.NoError(t, b.Add(t3.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false)) + b.Add(t3.Add(time.Millisecond), deps1.Agent.ID, deps1.User.ID, deps1.Template.ID, deps1.Workspace.ID, randStats(t), false) } else { - require.NoError(t, b.Add(t3.Add(time.Millisecond), deps2.Agent.ID, deps2.User.ID, deps2.Template.ID, deps2.Workspace.ID, randStats(t), false)) + b.Add(t3.Add(time.Millisecond), deps2.Agent.ID, deps2.User.ID, deps2.Template.ID, deps2.Workspace.ID, randStats(t), false) } } }() diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index fecfd1b1eda92..6bb1b2dea4028 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -6,7 +6,6 @@ import ( "time" "github.com/google/uuid" - "golang.org/x/sync/errgroup" "golang.org/x/xerrors" "cdr.dev/slog" @@ -119,69 +118,57 @@ func (r *Reporter) ReportAppStats(ctx context.Context, stats []workspaceapps.Sta } func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.Workspace, workspaceAgent database.WorkspaceAgent, templateName string, stats *agentproto.Stats, usage bool) error { - if stats.ConnectionCount > 0 { - var nextAutostart time.Time - if workspace.AutostartSchedule.String != "" { - templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) - // If the template schedule fails to load, just default to bumping - // without the next transition and log it. - if err != nil { - r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID), - slog.Error(err), - ) - } else { - next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) - if allowed { - nextAutostart = next - } - } - } - ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) - } + // update agent stats + r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) - var errGroup errgroup.Group - errGroup.Go(func() error { - err := r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) + // update prometheus metrics + if r.opts.UpdateAgentMetricsFn != nil { + user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) if err != nil { - r.opts.Logger.Error(ctx, "add agent stats to batcher", slog.Error(err)) - return xerrors.Errorf("insert workspace agent stats batch: %w", err) + return xerrors.Errorf("get user: %w", err) } + + r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ + Username: user.Username, + WorkspaceName: workspace.Name, + AgentName: workspaceAgent.Name, + TemplateName: templateName, + }, stats.Metrics) + } + + // if no active connections we do not bump activity + if stats.ConnectionCount == 0 { return nil - }) - errGroup.Go(func() error { - err := r.opts.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }) + } + + // check next autostart + var nextAutostart time.Time + if workspace.AutostartSchedule.String != "" { + templateSchedule, err := 
(*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) + // If the template schedule fails to load, just default to bumping + // without the next transition and log it. if err != nil { - return xerrors.Errorf("update workspace LastUsedAt: %w", err) - } - return nil - }) - if r.opts.UpdateAgentMetricsFn != nil { - errGroup.Go(func() error { - user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("get user: %w", err) + r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", + slog.F("workspace_id", workspace.ID), + slog.F("template_id", workspace.TemplateID), + slog.Error(err), + ) + } else { + next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) + if allowed { + nextAutostart = next } - - r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: workspaceAgent.Name, - TemplateName: templateName, - }, stats.Metrics) - return nil - }) - } - err := errGroup.Wait() - if err != nil { - return xerrors.Errorf("update stats in database: %w", err) + } } - err = r.opts.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspace.ID), []byte{}) + // bump workspace activity + ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) + + // bump workspace last_used_at + r.opts.UsageTracker.Add(workspace.ID) + + // notify workspace update + err := r.opts.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspace.ID), []byte{}) if err != nil { r.opts.Logger.Warn(ctx, "failed to publish workspace agent stats", slog.F("workspace_id", workspace.ID), slog.Error(err)) diff --git a/coderd/workspacestats/tracker.go b/coderd/workspacestats/tracker.go index 33532247b36e0..f55edde3b57e6 100644 --- a/coderd/workspacestats/tracker.go +++ b/coderd/workspacestats/tracker.go @@ -130,7 +130,6 @@ func (tr *UsageTracker) flush(now time.Time) { authCtx := dbauthz.AsSystemRestricted(ctx) tr.flushLock.Lock() defer tr.flushLock.Unlock() - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
if err := tr.s.BatchUpdateWorkspaceLastUsedAt(authCtx, database.BatchUpdateWorkspaceLastUsedAtParams{ LastUsedAt: now, IDs: ids, diff --git a/coderd/workspacestats/workspacestatstest/batcher.go b/coderd/workspacestats/workspacestatstest/batcher.go index 2f5dd7d13aa0a..592e244518790 100644 --- a/coderd/workspacestats/workspacestatstest/batcher.go +++ b/coderd/workspacestats/workspacestatstest/batcher.go @@ -25,7 +25,7 @@ type StatsBatcher struct { var _ workspacestats.Batcher = &StatsBatcher{} -func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) error { +func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats, usage bool) { b.Mu.Lock() defer b.Mu.Unlock() b.Called++ @@ -36,5 +36,4 @@ func (b *StatsBatcher) Add(now time.Time, agentID uuid.UUID, templateID uuid.UUI b.LastWorkspaceID = workspaceID b.LastStats = st b.LastUsage = usage - return nil } From 487b37b228b83736b61dfe32510b7dd14a792100 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Fri, 25 Oct 2024 11:52:57 -0500 Subject: [PATCH 04/42] feat(enterprise): support bearer tokens in SCIM authentication (#15233) --- enterprise/coderd/scim.go | 5 +++ enterprise/coderd/scim_test.go | 62 ++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/enterprise/coderd/scim.go b/enterprise/coderd/scim.go index 45390b6014a6a..5db1ed52bbc42 100644 --- a/enterprise/coderd/scim.go +++ b/enterprise/coderd/scim.go @@ -35,8 +35,13 @@ func (api *API) scimEnabledMW(next http.Handler) http.Handler { } func (api *API) scimVerifyAuthHeader(r *http.Request) bool { + bearer := []byte("Bearer ") hdr := []byte(r.Header.Get("Authorization")) + if len(hdr) >= len(bearer) && subtle.ConstantTimeCompare(hdr[:len(bearer)], bearer) == 1 { + hdr = hdr[len(bearer):] + } + return len(api.SCIMAPIKey) != 0 && subtle.ConstantTimeCompare(hdr, api.SCIMAPIKey) == 1 } diff --git a/enterprise/coderd/scim_test.go b/enterprise/coderd/scim_test.go index 8d65d9bb34531..c45ded27d6226 100644 --- a/enterprise/coderd/scim_test.go +++ b/enterprise/coderd/scim_test.go @@ -56,6 +56,12 @@ func setScimAuth(key []byte) func(*http.Request) { } } +func setScimAuthBearer(key []byte) func(*http.Request) { + return func(r *http.Request) { + r.Header.Set("Authorization", "Bearer "+string(key)) + } +} + //nolint:gocritic // SCIM authenticates via a special header and bypasses internal RBAC. 
func TestScim(t *testing.T) { t.Parallel() @@ -163,6 +169,62 @@ func TestScim(t *testing.T) { require.Empty(t, notifyEnq.Sent) }) + t.Run("OK_Bearer", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + // given + scimAPIKey := []byte("hi") + mockAudit := audit.NewMock() + notifyEnq := &testutil.FakeNotificationsEnqueuer{} + client, _ := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Auditor: mockAudit, + NotificationsEnqueuer: notifyEnq, + }, + SCIMAPIKey: scimAPIKey, + AuditLogging: true, + LicenseOptions: &coderdenttest.LicenseOptions{ + AccountID: "coolin", + Features: license.Features{ + codersdk.FeatureSCIM: 1, + codersdk.FeatureAuditLog: 1, + }, + }, + }) + mockAudit.ResetLogs() + + // when + sUser := makeScimUser(t) + res, err := client.Request(ctx, "POST", "/scim/v2/Users", sUser, setScimAuthBearer(scimAPIKey)) + require.NoError(t, err) + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + + // then + // Expect audit logs + aLogs := mockAudit.AuditLogs() + require.Len(t, aLogs, 1) + af := map[string]string{} + err = json.Unmarshal([]byte(aLogs[0].AdditionalFields), &af) + require.NoError(t, err) + assert.Equal(t, coderd.SCIMAuditAdditionalFields, af) + assert.Equal(t, database.AuditActionCreate, aLogs[0].Action) + + // Expect users exposed over API + userRes, err := client.Users(ctx, codersdk.UsersRequest{Search: sUser.Emails[0].Value}) + require.NoError(t, err) + require.Len(t, userRes.Users, 1) + assert.Equal(t, sUser.Emails[0].Value, userRes.Users[0].Email) + assert.Equal(t, sUser.UserName, userRes.Users[0].Username) + assert.Len(t, userRes.Users[0].OrganizationIDs, 1) + + // Expect zero notifications (SkipNotifications = true) + require.Empty(t, notifyEnq.Sent) + }) + t.Run("OKNoDefault", func(t *testing.T) { t.Parallel() From 900e2cd39c2184562dee7e2224954ac8c767243e Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Fri, 25 Oct 2024 13:23:12 -0400 Subject: [PATCH 05/42] chore: implement better 404 for unimplemented scim endpoints (#15232) Prior to this, html was returned. --- enterprise/coderd/coderd.go | 9 ++++++++- enterprise/coderd/scim.go | 11 ----------- enterprise/coderd/scim_test.go | 4 ++-- 3 files changed, 10 insertions(+), 14 deletions(-) diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 79453d617ed6e..2549a008e5f66 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -455,7 +455,7 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { if len(options.SCIMAPIKey) != 0 { api.AGPL.RootHandler.Route("/scim/v2", func(r chi.Router) { r.Use( - api.scimEnabledMW, + api.RequireFeatureMW(codersdk.FeatureSCIM), ) r.Post("/Users", api.scimPostUser) r.Route("/Users", func(r chi.Router) { @@ -464,6 +464,13 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { r.Get("/{id}", api.scimGetUser) r.Patch("/{id}", api.scimPatchUser) }) + r.NotFound(func(w http.ResponseWriter, r *http.Request) { + u := r.URL.String() + httpapi.Write(r.Context(), w, http.StatusNotFound, codersdk.Response{ + Message: fmt.Sprintf("SCIM endpoint %s not found", u), + Detail: "This endpoint is not implemented. If it is correct and required, please contact support.", + }) + }) }) } else { // Show a helpful 404 error. 
Because this is not under the /api/v2 routes, diff --git a/enterprise/coderd/scim.go b/enterprise/coderd/scim.go index 5db1ed52bbc42..28a40dd842d21 100644 --- a/enterprise/coderd/scim.go +++ b/enterprise/coderd/scim.go @@ -23,17 +23,6 @@ import ( "github.com/coder/coder/v2/codersdk" ) -func (api *API) scimEnabledMW(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if !api.Entitlements.Enabled(codersdk.FeatureSCIM) { - httpapi.RouteNotFound(rw) - return - } - - next.ServeHTTP(rw, r) - }) -} - func (api *API) scimVerifyAuthHeader(r *http.Request) bool { bearer := []byte("Bearer ") hdr := []byte(r.Header.Get("Authorization")) diff --git a/enterprise/coderd/scim_test.go b/enterprise/coderd/scim_test.go index c45ded27d6226..016c75d095484 100644 --- a/enterprise/coderd/scim_test.go +++ b/enterprise/coderd/scim_test.go @@ -88,7 +88,7 @@ func TestScim(t *testing.T) { res, err := client.Request(ctx, "POST", "/scim/v2/Users", struct{}{}) require.NoError(t, err) defer res.Body.Close() - assert.Equal(t, http.StatusNotFound, res.StatusCode) + assert.Equal(t, http.StatusForbidden, res.StatusCode) }) t.Run("noAuth", func(t *testing.T) { @@ -424,7 +424,7 @@ func TestScim(t *testing.T) { require.NoError(t, err) _, _ = io.Copy(io.Discard, res.Body) _ = res.Body.Close() - assert.Equal(t, http.StatusNotFound, res.StatusCode) + assert.Equal(t, http.StatusForbidden, res.StatusCode) }) t.Run("noAuth", func(t *testing.T) { From 27f5ff2dd119a2f6cb2a77dd1c3a080be7f436c4 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Fri, 25 Oct 2024 12:40:13 -0500 Subject: [PATCH 06/42] chore: correctly document SCIM authentication (#15234) --- coderd/apidoc/docs.go | 13 +++++++++---- coderd/apidoc/swagger.json | 13 +++++++++---- coderd/coderd.go | 4 ++++ docs/reference/api/enterprise.md | 8 ++++---- enterprise/coderd/scim.go | 8 ++++---- 5 files changed, 30 insertions(+), 16 deletions(-) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 09f070046066a..27514c3a56186 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -3627,7 +3627,7 @@ const docTemplate = `{ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3647,7 +3647,7 @@ const docTemplate = `{ "post": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3683,7 +3683,7 @@ const docTemplate = `{ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -3713,7 +3713,7 @@ const docTemplate = `{ "patch": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": [ @@ -16289,6 +16289,11 @@ const docTemplate = `{ } }, "securityDefinitions": { + "Authorization": { + "type": "apiKey", + "name": "Authorizaiton", + "in": "header" + }, "CoderSessionToken": { "type": "apiKey", "name": "Coder-Session-Token", diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 42b34d576509a..9457184c6d48a 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -3193,7 +3193,7 @@ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/scim+json"], @@ -3209,7 +3209,7 @@ "post": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/json"], @@ -3241,7 +3241,7 @@ "get": { "security": [ { - "CoderSessionToken": [] + "Authorization": [] } ], "produces": ["application/scim+json"], @@ -3267,7 +3267,7 @@ "patch": { "security": [ { - "CoderSessionToken": [] 
+ "Authorization": [] } ], "produces": ["application/scim+json"], @@ -14902,6 +14902,11 @@ } }, "securityDefinitions": { + "Authorization": { + "type": "apiKey", + "name": "Authorizaiton", + "in": "header" + }, "CoderSessionToken": { "type": "apiKey", "name": "Coder-Session-Token", diff --git a/coderd/coderd.go b/coderd/coderd.go index 3011c2d58d39c..bd844d7ca13c3 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -271,6 +271,10 @@ type Options struct { // @BasePath /api/v2 +// @securitydefinitions.apiKey Authorization +// @in header +// @name Authorizaiton + // @securitydefinitions.apiKey CoderSessionToken // @in header // @name Coder-Session-Token diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index 96256b30aeed6..a5c0857edee1d 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -2014,7 +2014,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio ```shell # Example request using curl curl -X GET http://coder-server:8080/api/v2/scim/v2/Users \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `GET /scim/v2/Users` @@ -2036,7 +2036,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio curl -X POST http://coder-server:8080/api/v2/scim/v2/Users \ -H 'Content-Type: application/json' \ -H 'Accept: application/json' \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `POST /scim/v2/Users` @@ -2118,7 +2118,7 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio ```shell # Example request using curl curl -X GET http://coder-server:8080/api/v2/scim/v2/Users/{id} \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `GET /scim/v2/Users/{id}` @@ -2146,7 +2146,7 @@ To perform this operation, you must be authenticated. 
[Learn more](authenticatio curl -X PATCH http://coder-server:8080/api/v2/scim/v2/Users/{id} \ -H 'Content-Type: application/json' \ -H 'Accept: application/scim+json' \ - -H 'Coder-Session-Token: API_KEY' + -H 'Authorizaiton: API_KEY' ``` `PATCH /scim/v2/Users/{id}` diff --git a/enterprise/coderd/scim.go b/enterprise/coderd/scim.go index 28a40dd842d21..1e2f70b57b733 100644 --- a/enterprise/coderd/scim.go +++ b/enterprise/coderd/scim.go @@ -40,7 +40,7 @@ func (api *API) scimVerifyAuthHeader(r *http.Request) bool { // // @Summary SCIM 2.0: Get users // @ID scim-get-users -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Success 200 @@ -67,7 +67,7 @@ func (api *API) scimGetUsers(rw http.ResponseWriter, r *http.Request) { // // @Summary SCIM 2.0: Get user by ID // @ID scim-get-user-by-id -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Param id path string true "User ID" format(uuid) @@ -118,7 +118,7 @@ var SCIMAuditAdditionalFields = map[string]string{ // // @Summary SCIM 2.0: Create new user // @ID scim-create-new-user -// @Security CoderSessionToken +// @Security Authorization // @Produce json // @Tags Enterprise // @Param request body coderd.SCIMUser true "New user" @@ -254,7 +254,7 @@ func (api *API) scimPostUser(rw http.ResponseWriter, r *http.Request) { // // @Summary SCIM 2.0: Update user account // @ID scim-update-user-status -// @Security CoderSessionToken +// @Security Authorization // @Produce application/scim+json // @Tags Enterprise // @Param id path string true "User ID" format(uuid) From e03ef62a13615f74571d2d550edd93ce0a45b419 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Fri, 25 Oct 2024 18:27:34 -0400 Subject: [PATCH 07/42] chore: add scim service provider config endpoint (#15235) Adds a static `/scim/v2/ServiceProviderConfig` endpoint. Our scim support is static, so the response config is also defined statically. 
--- coderd/apidoc/docs.go | 17 ++++++++ coderd/apidoc/swagger.json | 13 ++++++ docs/reference/api/enterprise.md | 18 ++++++++ enterprise/coderd/coderd.go | 1 + enterprise/coderd/scim.go | 65 +++++++++++++++++++++++++++++ enterprise/coderd/scim/scimtypes.go | 46 ++++++++++++++++++++ enterprise/coderd/scim_test.go | 8 +++- 7 files changed, 167 insertions(+), 1 deletion(-) create mode 100644 enterprise/coderd/scim/scimtypes.go diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 27514c3a56186..83d1fdc2c492a 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -3623,6 +3623,23 @@ const docTemplate = `{ } } }, + "/scim/v2/ServiceProviderConfig": { + "get": { + "produces": [ + "application/scim+json" + ], + "tags": [ + "Enterprise" + ], + "summary": "SCIM 2.0: Service Provider Config", + "operationId": "scim-get-service-provider-config", + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/scim/v2/Users": { "get": { "security": [ diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 9457184c6d48a..9861e195b7a69 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -3189,6 +3189,19 @@ } } }, + "/scim/v2/ServiceProviderConfig": { + "get": { + "produces": ["application/scim+json"], + "tags": ["Enterprise"], + "summary": "SCIM 2.0: Service Provider Config", + "operationId": "scim-get-service-provider-config", + "responses": { + "200": { + "description": "OK" + } + } + } + }, "/scim/v2/Users": { "get": { "security": [ diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index a5c0857edee1d..57ffa5260edde 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -2007,6 +2007,24 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+## SCIM 2.0: Service Provider Config + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/scim/v2/ServiceProviderConfig + +``` + +`GET /scim/v2/ServiceProviderConfig` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ------ | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | | + ## SCIM 2.0: Get users ### Code samples diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 2549a008e5f66..7e59eb341411f 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -457,6 +457,7 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { r.Use( api.RequireFeatureMW(codersdk.FeatureSCIM), ) + r.Get("/ServiceProviderConfig", api.scimServiceProviderConfig) r.Post("/Users", api.scimPostUser) r.Route("/Users", func(r chi.Router) { r.Get("/", api.scimGetUsers) diff --git a/enterprise/coderd/scim.go b/enterprise/coderd/scim.go index 1e2f70b57b733..439e6ca3225de 100644 --- a/enterprise/coderd/scim.go +++ b/enterprise/coderd/scim.go @@ -5,6 +5,7 @@ import ( "database/sql" "encoding/json" "net/http" + "time" "github.com/go-chi/chi/v5" "github.com/google/uuid" @@ -21,6 +22,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/scim" ) func (api *API) scimVerifyAuthHeader(r *http.Request) bool { @@ -34,6 +36,69 @@ func (api *API) scimVerifyAuthHeader(r *http.Request) bool { return len(api.SCIMAPIKey) != 0 && subtle.ConstantTimeCompare(hdr, api.SCIMAPIKey) == 1 } +// scimServiceProviderConfig returns a static SCIM service provider configuration. +// +// @Summary SCIM 2.0: Service Provider Config +// @ID scim-get-service-provider-config +// @Produce application/scim+json +// @Tags Enterprise +// @Success 200 +// @Router /scim/v2/ServiceProviderConfig [get] +func (api *API) scimServiceProviderConfig(rw http.ResponseWriter, _ *http.Request) { + // No auth needed to query this endpoint. + + rw.Header().Set("Content-Type", spec.ApplicationScimJson) + rw.WriteHeader(http.StatusOK) + + // providerUpdated is the last time the static provider config was updated. + // Increment this time if you make any changes to the provider config. 
+ providerUpdated := time.Date(2024, 10, 25, 17, 0, 0, 0, time.UTC) + var location string + locURL, err := api.AccessURL.Parse("/scim/v2/ServiceProviderConfig") + if err == nil { + location = locURL.String() + } + + enc := json.NewEncoder(rw) + enc.SetEscapeHTML(true) + _ = enc.Encode(scim.ServiceProviderConfig{ + Schemas: []string{"urn:ietf:params:scim:schemas:core:2.0:ServiceProviderConfig"}, + DocURI: "https://coder.com/docs/admin/users/oidc-auth#scim-enterprise-premium", + Patch: scim.Supported{ + Supported: true, + }, + Bulk: scim.BulkSupported{ + Supported: false, + }, + Filter: scim.FilterSupported{ + Supported: false, + }, + ChangePassword: scim.Supported{ + Supported: false, + }, + Sort: scim.Supported{ + Supported: false, + }, + ETag: scim.Supported{ + Supported: false, + }, + AuthSchemes: []scim.AuthenticationScheme{ + { + Type: "oauthbearertoken", + Name: "HTTP Header Authentication", + Description: "Authentication scheme using the Authorization header with the shared token", + DocURI: "https://coder.com/docs/admin/users/oidc-auth#scim-enterprise-premium", + }, + }, + Meta: scim.ServiceProviderMeta{ + Created: providerUpdated, + LastModified: providerUpdated, + Location: location, + ResourceType: "ServiceProviderConfig", + }, + }) +} + // scimGetUsers intentionally always returns no users. This is done to always force // Okta to try and create each user individually, this way we don't need to // implement fetching users twice. diff --git a/enterprise/coderd/scim/scimtypes.go b/enterprise/coderd/scim/scimtypes.go new file mode 100644 index 0000000000000..e78b70b3e9f3f --- /dev/null +++ b/enterprise/coderd/scim/scimtypes.go @@ -0,0 +1,46 @@ +package scim + +import "time" + +type ServiceProviderConfig struct { + Schemas []string `json:"schemas"` + DocURI string `json:"documentationUri"` + Patch Supported `json:"patch"` + Bulk BulkSupported `json:"bulk"` + Filter FilterSupported `json:"filter"` + ChangePassword Supported `json:"changePassword"` + Sort Supported `json:"sort"` + ETag Supported `json:"etag"` + AuthSchemes []AuthenticationScheme `json:"authenticationSchemes"` + Meta ServiceProviderMeta `json:"meta"` +} + +type ServiceProviderMeta struct { + Created time.Time `json:"created"` + LastModified time.Time `json:"lastModified"` + Location string `json:"location"` + ResourceType string `json:"resourceType"` +} + +type Supported struct { + Supported bool `json:"supported"` +} + +type BulkSupported struct { + Supported bool `json:"supported"` + MaxOp int `json:"maxOperations"` + MaxPayload int `json:"maxPayloadSize"` +} + +type FilterSupported struct { + Supported bool `json:"supported"` + MaxResults int `json:"maxResults"` +} + +type AuthenticationScheme struct { + Type string `json:"type"` + Name string `json:"name"` + Description string `json:"description"` + SpecURI string `json:"specUri"` + DocURI string `json:"documentationUri"` +} diff --git a/enterprise/coderd/scim_test.go b/enterprise/coderd/scim_test.go index 016c75d095484..82355c3a3b9c0 100644 --- a/enterprise/coderd/scim_test.go +++ b/enterprise/coderd/scim_test.go @@ -140,9 +140,15 @@ func TestScim(t *testing.T) { }) mockAudit.ResetLogs() + // verify scim is enabled + res, err := client.Request(ctx, http.MethodGet, "/scim/v2/ServiceProviderConfig", nil) + require.NoError(t, err) + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + // when sUser := makeScimUser(t) - res, err := client.Request(ctx, "POST", "/scim/v2/Users", sUser, setScimAuth(scimAPIKey)) + res, err = client.Request(ctx, 
http.MethodPost, "/scim/v2/Users", sUser, setScimAuth(scimAPIKey)) require.NoError(t, err) defer res.Body.Close() require.Equal(t, http.StatusOK, res.StatusCode) From 9308331d9ab03ce4d0e73b660afe67f394843096 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Fri, 25 Oct 2024 18:50:42 -0400 Subject: [PATCH 08/42] chore: change promtheus label to 'tx_id' (#15238) the 'id' label was not coming through. Maybe it's reserved? Or used in the chain somewhere. --- coderd/database/dbmetrics/dbmetrics.go | 10 +++++----- coderd/database/dbmetrics/dbmetrics_test.go | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 404a685876bc0..8708787f57dc7 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -41,7 +41,7 @@ func NewDBMetrics(s database.Store, logger slog.Logger, reg prometheus.Registere // retries = Executions - 1 (as 1 execute is expected) "retries", // Uniquely naming some transactions can help debug reoccurring errors. - "id", + "tx_id", }) reg.MustRegister(txRetries) @@ -54,7 +54,7 @@ func NewDBMetrics(s database.Store, logger slog.Logger, reg prometheus.Registere }, []string{ "success", // Did the InTx function return an error? // Uniquely naming some transactions can help debug reoccurring errors. - "id", + "tx_id", }) reg.MustRegister(txDuration) return &metricsStore{ @@ -82,13 +82,13 @@ func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOpt // So IDs should be used sparingly to prevent too much bloat. m.txDuration.With(prometheus.Labels{ "success": strconv.FormatBool(err == nil), - "id": options.TxIdentifier, // Can be empty string for unlabeled + "tx_id": options.TxIdentifier, // Can be empty string for unlabeled }).Observe(dur.Seconds()) m.txRetries.With(prometheus.Labels{ "success": strconv.FormatBool(err == nil), "retries": strconv.FormatInt(int64(options.ExecutionCount()-1), 10), - "id": options.TxIdentifier, // Can be empty string for unlabeled + "tx_id": options.TxIdentifier, // Can be empty string for unlabeled }).Inc() // Log all serializable transactions that are retried. @@ -109,7 +109,7 @@ func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOpt // since the first error was a serialization error. slog.Error(err), // Might be nil, that is ok! 
slog.F("executions", options.ExecutionCount()), - slog.F("id", options.TxIdentifier), + slog.F("tx_id", options.TxIdentifier), slog.F("duration", dur), ) } diff --git a/coderd/database/dbmetrics/dbmetrics_test.go b/coderd/database/dbmetrics/dbmetrics_test.go index 2b8d2979b1cfe..7eed7035ee281 100644 --- a/coderd/database/dbmetrics/dbmetrics_test.go +++ b/coderd/database/dbmetrics/dbmetrics_test.go @@ -22,7 +22,7 @@ func TestInTxMetrics(t *testing.T) { successLabels := prometheus.Labels{ "success": "true", - "id": "", + "tx_id": "", } const inTxHistMetricName = "coderd_db_tx_duration_seconds" const inTxCountMetricName = "coderd_db_tx_executions_count" @@ -86,7 +86,7 @@ func TestInTxMetrics(t *testing.T) { // Check that the metrics are registered inTxHistMetric := promhelp.HistogramValue(t, reg, inTxHistMetricName, prometheus.Labels{ "success": "false", - "id": id, + "tx_id": id, }) require.NotNil(t, inTxHistMetric) require.Equal(t, uint64(1), inTxHistMetric.GetSampleCount()) @@ -94,7 +94,7 @@ func TestInTxMetrics(t *testing.T) { inTxCountMetric := promhelp.CounterValue(t, reg, inTxCountMetricName, prometheus.Labels{ "success": "false", "retries": "1", - "id": id, + "tx_id": id, }) require.NotNil(t, inTxCountMetric) require.Equal(t, 1, inTxCountMetric) From 91c337a2ff5a62b023dd4e22bd4086ff979b79a5 Mon Sep 17 00:00:00 2001 From: Phorcys <57866459+phorcys420@users.noreply.github.com> Date: Sun, 27 Oct 2024 14:17:03 +0100 Subject: [PATCH 09/42] feat: use `hashicorp/cloud-init` provider in AWS devcontainer template (#15050) This PR makes templates uses the [hashicorp/cloud-init](https://registry.terraform.io/providers/hashicorp/cloudinit/latest/docs) provider instead of hardcoding a cloud-init config. --- .../cloud-init/cloud-config.yaml.tftpl | 15 ++ .../cloud-init/userdata.sh.tftpl | 37 +++++ examples/templates/aws-devcontainer/main.tf | 139 +++++++----------- 3 files changed, 103 insertions(+), 88 deletions(-) create mode 100644 examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl create mode 100644 examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl diff --git a/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl b/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl new file mode 100644 index 0000000000000..af6b35171ca30 --- /dev/null +++ b/examples/templates/aws-devcontainer/cloud-init/cloud-config.yaml.tftpl @@ -0,0 +1,15 @@ +#cloud-config +cloud_final_modules: + - [scripts-user, always] +hostname: ${hostname} +users: + - name: ${linux_user} + sudo: ALL=(ALL) NOPASSWD:ALL + shell: /bin/bash + ssh_authorized_keys: + - "${ssh_pubkey}" +# Automatically grow the partition +growpart: + mode: auto + devices: ['/'] + ignore_growroot_disabled: false diff --git a/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl b/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl new file mode 100644 index 0000000000000..67c166cb6c164 --- /dev/null +++ b/examples/templates/aws-devcontainer/cloud-init/userdata.sh.tftpl @@ -0,0 +1,37 @@ +#!/bin/bash +# Install Docker +if ! command -v docker &> /dev/null +then + echo "Docker not found, installing..." + curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh 2>&1 >/dev/null + usermod -aG docker ${linux_user} + newgrp docker +else + echo "Docker is already installed." +fi + +# Set up Docker credentials +mkdir -p "/home/${linux_user}/.docker" + +if [ -n "${docker_config_json_base64}" ]; then + # Write the Docker config JSON to disk if it is provided. 
+ printf "%s" "${docker_config_json_base64}" | base64 -d | tee "/home/${linux_user}/.docker/config.json" +else + # Assume that we're going to use the instance IAM role to pull from the cache repo if we need to. + # Set up the ecr credential helper. + apt-get update -y && apt-get install -y amazon-ecr-credential-helper + mkdir -p .docker + printf '{"credsStore": "ecr-login"}' | tee "/home/${linux_user}/.docker/config.json" +fi +chown -R ${linux_user}:${linux_user} "/home/${linux_user}/.docker" + +# Start envbuilder +sudo -u coder docker run \ + --rm \ + --net=host \ + -h ${hostname} \ + -v /home/${linux_user}/envbuilder:/workspaces \ + %{ for key, value in environment ~} + -e ${key}="${value}" \ + %{ endfor ~} + ${builder_image} diff --git a/examples/templates/aws-devcontainer/main.tf b/examples/templates/aws-devcontainer/main.tf index 27434385c647b..a8f6a2bbd4b46 100644 --- a/examples/templates/aws-devcontainer/main.tf +++ b/examples/templates/aws-devcontainer/main.tf @@ -6,6 +6,9 @@ terraform { aws = { source = "hashicorp/aws" } + cloudinit = { + source = "hashicorp/cloudinit" + } envbuilder = { source = "coder/envbuilder" } @@ -153,13 +156,16 @@ data "aws_iam_instance_profile" "vm_instance_profile" { locals { # TODO: provide a way to pick the availability zone. aws_availability_zone = "${module.aws_region.value}a" - linux_user = "coder" - # Name the container after the workspace and owner. - container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" + + hostname = lower(data.coder_workspace.me.name) + linux_user = "coder" + # The devcontainer builder image is the image that will build the devcontainer. devcontainer_builder_image = data.coder_parameter.devcontainer_builder.value + # We may need to authenticate with a registry. If so, the user will provide a path to a docker config.json. docker_config_json_base64 = try(data.local_sensitive_file.cache_repo_dockerconfigjson[0].content_base64, "") + # The envbuilder provider requires a key-value map of environment variables. Build this here. envbuilder_env = { # ENVBUILDER_GIT_URL and ENVBUILDER_CACHE_REPO will be overridden by the provider @@ -172,7 +178,7 @@ locals { # The agent init script is required for the agent to start up. We base64 encode it here # to avoid quoting issues. "ENVBUILDER_INIT_SCRIPT" : "echo ${base64encode(try(coder_agent.dev[0].init_script, ""))} | base64 -d | sh", - "ENVBUILDER_DOCKER_CONFIG_BASE64" : try(data.local_sensitive_file.cache_repo_dockerconfigjson[0].content_base64, ""), + "ENVBUILDER_DOCKER_CONFIG_BASE64" : local.docker_config_json_base64, # The fallback image is the image that will run if the devcontainer fails to build. "ENVBUILDER_FALLBACK_IMAGE" : data.coder_parameter.fallback_image.value, # The following are used to push the image to the cache repo, if defined. @@ -181,87 +187,6 @@ locals { # You can add other required environment variables here. # See: https://github.com/coder/envbuilder/?tab=readme-ov-file#environment-variables } - # If we have a cached image, use the cached image's environment variables. Otherwise, just use - # the environment variables we've defined above. - docker_env_input = try(envbuilder_cached_image.cached.0.env_map, local.envbuilder_env) - # Convert the above to the list of arguments for the Docker run command. - # The startup script will write this to a file, which the Docker run command will reference. 
- docker_env_list_base64 = base64encode(join("\n", [for k, v in local.docker_env_input : "${k}=${v}"])) - # Builder image will either be the builder image parameter, or the cached image, if cache is provided. - builder_image = try(envbuilder_cached_image.cached[0].image, data.coder_parameter.devcontainer_builder.value) - # User data to start the workspace. - user_data = <<-EOT - Content-Type: multipart/mixed; boundary="//" - MIME-Version: 1.0 - - --// - Content-Type: text/cloud-config; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="cloud-config.txt" - - #cloud-config - cloud_final_modules: - - [scripts-user, always] - hostname: ${lower(data.coder_workspace.me.name)} - users: - - name: ${local.linux_user} - sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash - ssh_authorized_keys: - - "${data.coder_parameter.ssh_pubkey.value}" - # Automatically grow the partition - growpart: - mode: auto - devices: ['/'] - ignore_growroot_disabled: false - - --// - Content-Type: text/x-shellscript; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="userdata.txt" - - #!/bin/bash - # Install Docker - if ! command -v docker &> /dev/null - then - echo "Docker not found, installing..." - curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh 2>&1 >/dev/null - usermod -aG docker ${local.linux_user} - newgrp docker - else - echo "Docker is already installed." - fi - - # Set up Docker credentials - mkdir -p "/home/${local.linux_user}/.docker" - if [ -n "${local.docker_config_json_base64}" ]; then - # Write the Docker config JSON to disk if it is provided. - printf "%s" "${local.docker_config_json_base64}" | base64 -d | tee "/home/${local.linux_user}/.docker/config.json" - else - # Assume that we're going to use the instance IAM role to pull from the cache repo if we need to. - # Set up the ecr credential helper. - apt-get update -y && apt-get install -y amazon-ecr-credential-helper - mkdir -p .docker - printf '{"credsStore": "ecr-login"}' | tee "/home/${local.linux_user}/.docker/config.json" - fi - chown -R ${local.linux_user}:${local.linux_user} "/home/${local.linux_user}/.docker" - - # Write the container env to disk. - printf "%s" "${local.docker_env_list_base64}" | base64 -d | tee "/home/${local.linux_user}/env.txt" - - # Start envbuilder - sudo -u coder docker run \ - --rm \ - --net=host \ - -h ${lower(data.coder_workspace.me.name)} \ - -v /home/${local.linux_user}/envbuilder:/workspaces \ - -v /var/run/docker.sock:/var/run/docker.sock \ - --env-file /home/${local.linux_user}/env.txt \ - ${local.builder_image} - --//-- - EOT } # Check for the presence of a prebuilt image in the cache repo @@ -274,9 +199,47 @@ resource "envbuilder_cached_image" "cached" { extra_env = local.envbuilder_env } +data "cloudinit_config" "user_data" { + gzip = false + base64_encode = false + + boundary = "//" + + part { + filename = "cloud-config.yaml" + content_type = "text/cloud-config" + + content = templatefile("${path.module}/cloud-init/cloud-config.yaml.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + + ssh_pubkey = data.coder_parameter.ssh_pubkey.value + }) + } + + part { + filename = "userdata.sh" + content_type = "text/x-shellscript" + + content = templatefile("${path.module}/cloud-init/userdata.sh.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + + # If we have a cached image, use the cached image's environment variables. 
+ # Otherwise, just use the environment variables we've defined in locals. + environment = try(envbuilder_cached_image.cached[0].env_map, local.envbuilder_env) + + # Builder image will either be the builder image parameter, or the cached image, if cache is provided. + builder_image = try(envbuilder_cached_image.cached[0].image, data.coder_parameter.devcontainer_builder.value) + + docker_config_json_base64 = local.docker_config_json_base64 + }) + } +} + # This is useful for debugging the startup script. Left here for reference. # resource local_file "startup_script" { -# content = local.user_data +# content = data.cloudinit_config.user_data.rendered # filename = "${path.module}/user_data.txt" # } @@ -289,9 +252,9 @@ resource "aws_instance" "vm" { volume_size = data.coder_parameter.root_volume_size_gb.value } - user_data = local.user_data + user_data = data.cloudinit_config.user_data.rendered tags = { - Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" + Name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Required if you are using our example policy, see template README Coder_Provisioned = "true" } From c8f68cbc4690fbcf1318b6f83b0cdca9b1ed0e3e Mon Sep 17 00:00:00 2001 From: Phorcys <57866459+phorcys420@users.noreply.github.com> Date: Mon, 28 Oct 2024 07:43:45 +0000 Subject: [PATCH 10/42] feat: use `hashicorp/cloud-init` provider for AWS-linux example (#15240) Same as #15050 but for the `aws-linux` template. Tested, works as expected. --- .../cloud-init/cloud-config.yaml.tftpl | 8 +++ .../aws-linux/cloud-init/userdata.sh.tftpl | 2 + examples/templates/aws-linux/main.tf | 55 +++++++++---------- 3 files changed, 37 insertions(+), 28 deletions(-) create mode 100644 examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl create mode 100644 examples/templates/aws-linux/cloud-init/userdata.sh.tftpl diff --git a/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl b/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl new file mode 100644 index 0000000000000..14da769454eda --- /dev/null +++ b/examples/templates/aws-linux/cloud-init/cloud-config.yaml.tftpl @@ -0,0 +1,8 @@ +#cloud-config +cloud_final_modules: + - [scripts-user, always] +hostname: ${hostname} +users: + - name: ${linux_user} + sudo: ALL=(ALL) NOPASSWD:ALL + shell: /bin/bash diff --git a/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl b/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl new file mode 100644 index 0000000000000..2070bc4df3de7 --- /dev/null +++ b/examples/templates/aws-linux/cloud-init/userdata.sh.tftpl @@ -0,0 +1,2 @@ +#!/bin/bash +sudo -u '${linux_user}' sh -c '${init_script}' diff --git a/examples/templates/aws-linux/main.tf b/examples/templates/aws-linux/main.tf index 5f0f87420ccfb..b5979ef89e3e4 100644 --- a/examples/templates/aws-linux/main.tf +++ b/examples/templates/aws-linux/main.tf @@ -140,8 +140,7 @@ provider "aws" { region = data.coder_parameter.region.value } -data "coder_workspace" "me" { -} +data "coder_workspace" "me" {} data "coder_workspace_owner" "me" {} data "aws_ami" "ubuntu" { @@ -214,36 +213,36 @@ resource "coder_app" "code-server" { } locals { + hostname = lower(data.coder_workspace.me.name) linux_user = "coder" - user_data = <<-EOT - Content-Type: multipart/mixed; boundary="//" - MIME-Version: 1.0 +} - --// - Content-Type: text/cloud-config; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="cloud-config.txt" 
+data "cloudinit_config" "user_data" { + gzip = false + base64_encode = false - #cloud-config - cloud_final_modules: - - [scripts-user, always] - hostname: ${lower(data.coder_workspace.me.name)} - users: - - name: ${local.linux_user} - sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash + boundary = "//" - --// - Content-Type: text/x-shellscript; charset="us-ascii" - MIME-Version: 1.0 - Content-Transfer-Encoding: 7bit - Content-Disposition: attachment; filename="userdata.txt" + part { + filename = "cloud-config.yaml" + content_type = "text/cloud-config" - #!/bin/bash - sudo -u ${local.linux_user} sh -c '${try(coder_agent.dev[0].init_script, "")}' - --//-- - EOT + content = templatefile("${path.module}/cloud-init/cloud-config.yaml.tftpl", { + hostname = local.hostname + linux_user = local.linux_user + }) + } + + part { + filename = "userdata.sh" + content_type = "text/x-shellscript" + + content = templatefile("${path.module}/cloud-init/userdata.sh.tftpl", { + linux_user = local.linux_user + + init_script = try(coder_agent.dev[0].init_script, "") + }) + } } resource "aws_instance" "dev" { @@ -251,7 +250,7 @@ resource "aws_instance" "dev" { availability_zone = "${data.coder_parameter.region.value}a" instance_type = data.coder_parameter.instance_type.value - user_data = local.user_data + user_data = data.cloudinit_config.user_data.rendered tags = { Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" # Required if you are using our example policy, see template README From 007f0a35a4ffcd1147e9d2899b4ae3305d1e0d46 Mon Sep 17 00:00:00 2001 From: Edward Angert Date: Mon, 28 Oct 2024 07:43:30 -0400 Subject: [PATCH 11/42] fix: adjust instances of Github to GitHub (#15203) s/Github/GitHub Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com> --- docs/admin/infrastructure/validated-architectures/index.md | 2 +- docs/admin/integrations/opentofu.md | 2 +- docs/changelogs/v2.1.5.md | 2 +- docs/install/releases.md | 4 ++-- docs/reference/api/schemas.md | 4 ++-- docs/tutorials/example-guide.md | 4 ++-- docs/tutorials/faqs.md | 6 +++--- docs/tutorials/index.md | 2 +- docs/user-guides/workspace-access/vscode.md | 4 ++-- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/admin/infrastructure/validated-architectures/index.md b/docs/admin/infrastructure/validated-architectures/index.md index 85cbe430cc566..f0baa7c632b98 100644 --- a/docs/admin/infrastructure/validated-architectures/index.md +++ b/docs/admin/infrastructure/validated-architectures/index.md @@ -340,7 +340,7 @@ could affect workspace users experience once the platform is live. 1. Maintain Coder templates using [version control](../../templates/managing-templates/change-management.md). 1. Consider implementing a GitOps workflow to automatically push new template - versions into Coder from git. For example, on Github, you can use the + versions into Coder from git. For example, on GitHub, you can use the [Setup Coder](https://github.com/marketplace/actions/setup-coder) action. 1. Evaluate enabling [automatic template updates](../../templates/managing-templates/index.md#template-update-policies-enterprise-premium) diff --git a/docs/admin/integrations/opentofu.md b/docs/admin/integrations/opentofu.md index 6268a228e5d03..1867f03e8e2ed 100644 --- a/docs/admin/integrations/opentofu.md +++ b/docs/admin/integrations/opentofu.md @@ -4,7 +4,7 @@ > ⚠️ This guide is a work in progress. We do not officially support using custom > Terraform binaries in your Coder deployment. 
To track progress on the work, -> see this related [Github Issue](https://github.com/coder/coder/issues/12009). +> see this related [GitHub Issue](https://github.com/coder/coder/issues/12009). Coder deployments support any custom Terraform binary, including [OpenTofu](https://opentofu.org/docs/) - an open source alternative to diff --git a/docs/changelogs/v2.1.5.md b/docs/changelogs/v2.1.5.md index bb73d31f9acff..f23eff4b67b25 100644 --- a/docs/changelogs/v2.1.5.md +++ b/docs/changelogs/v2.1.5.md @@ -36,7 +36,7 @@ (@spikecurtis) - Fix null pointer on external provisioner daemons with daily_cost (#9401) (@spikecurtis) -- Hide OIDC and Github auth settings when they are disabled (#9447) (@aslilac) +- Hide OIDC and GitHub auth settings when they are disabled (#9447) (@aslilac) - Generate username with uuid to prevent collision (#9496) (@kylecarbs) - Make 'NoRefresh' honor unlimited tokens in gitauth (#9472) (@Emyrk) - Dotfiles: add an exception for `.gitconfig` (#9515) (@matifali) diff --git a/docs/install/releases.md b/docs/install/releases.md index 261d8c43dc42c..51950f9d1edc6 100644 --- a/docs/install/releases.md +++ b/docs/install/releases.md @@ -1,7 +1,7 @@ # Releases Coder releases are cut directly from main in our -[Github](https://github.com/coder/coder) on the first Tuesday of each month. +[GitHub](https://github.com/coder/coder) on the first Tuesday of each month. We recommend enterprise customers test the compatibility of new releases with their infrastructure on a staging environment before upgrading a production @@ -38,7 +38,7 @@ only for security issues or CVEs. ## Installing stable When installing Coder, we generally advise specifying the desired version from -our Github [releases page](https://github.com/coder/coder/releases). +our GitHub [releases page](https://github.com/coder/coder/releases). You can also use our `install.sh` script with the `stable` flag to install the latest stable release: diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index f4e683305029b..121620fc98fd6 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -3605,9 +3605,9 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | Name | Type | Required | Restrictions | Description | | -------- | ---------------------------------------------------------- | -------- | ------------ | ----------- | -| `github` | [codersdk.OAuth2GithubConfig](#codersdkoauth2githubconfig) | false | | | +| `github` | [codersdk.OAuth2GitHubConfig](#codersdkoauth2githubconfig) | false | | | -## codersdk.OAuth2GithubConfig +## codersdk.OAuth2GitHubConfig ```json { diff --git a/docs/tutorials/example-guide.md b/docs/tutorials/example-guide.md index b0a9de5e8dafd..f60ce6972710b 100644 --- a/docs/tutorials/example-guide.md +++ b/docs/tutorials/example-guide.md @@ -1,4 +1,4 @@ -# Guide Title (Only Visible in Github) +# Guide Title (Only Visible in GitHub)
@@ -28,7 +28,7 @@ Use relative imports in the markdown and store photos in ### Setting the author data At the top of this example you will find a small html snippet that nicely -renders the author's name and photo, while linking to their Github profile. +renders the author's name and photo, while linking to their GitHub profile. Before submitting your guide in a PR, replace `your_github_handle`, `your_github_profile_photo_url` and "Your Name". The entire `` element can be omitted. diff --git a/docs/tutorials/faqs.md b/docs/tutorials/faqs.md index 96dccf8047334..29eb86ea39b04 100644 --- a/docs/tutorials/faqs.md +++ b/docs/tutorials/faqs.md @@ -5,7 +5,7 @@ come from our community and enterprise customers, feel free to [contribute to this page](https://github.com/coder/coder/edit/main/docs/tutorials/faqs.md). For other community resources, see our -[Github discussions](https://github.com/coder/coder/discussions), or join our +[GitHub discussions](https://github.com/coder/coder/discussions), or join our [Discord server](https://discord.gg/coder). ### How do I add a Premium trial license? @@ -291,8 +291,8 @@ tar -cvh -C ./template_1 | coder templates -d - References: -- [Public Github Issue 6117](https://github.com/coder/coder/issues/6117) -- [Public Github Issue 5677](https://github.com/coder/coder/issues/5677) +- [Public GitHub Issue 6117](https://github.com/coder/coder/issues/6117) +- [Public GitHub Issue 5677](https://github.com/coder/coder/issues/5677) - [Coder docs: Templates/Change Management](../admin/templates/managing-templates/change-management.md) ### Can I run Coder in an air-gapped or offline mode? (no Internet)? diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index b849120f8497e..6a38fe2b1cb96 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -2,7 +2,7 @@ Here you can find a list of employee-written guides on Coder for OSS and Enterprise. These tutorials are hosted on our -[Github](https://github.com/coder/coder/) where you can leave feedback or +[GitHub](https://github.com/coder/coder/) where you can leave feedback or request new topics to be covered. diff --git a/docs/user-guides/workspace-access/vscode.md b/docs/user-guides/workspace-access/vscode.md index 54d3fcf9c0aad..dc3cac46be0e8 100644 --- a/docs/user-guides/workspace-access/vscode.md +++ b/docs/user-guides/workspace-access/vscode.md @@ -99,7 +99,7 @@ Web or using the workspace's terminal. ```tf resource "coder_agent" "main" { ... - startup_script = "code-server --install-extension /vsix/Github.copilot.vsix" + startup_script = "code-server --install-extension /vsix/GitHub.copilot.vsix" } ``` @@ -130,7 +130,7 @@ Using the workspace's terminal or the terminal available inside `code-server`, you can install an extension whose files you've downloaded from a marketplace: ```console -/path/to/code-server --install-extension /vsix/Github.copilot.vsix +/path/to/code-server --install-extension /vsix/GitHub.copilot.vsix ``` ### Installing from a marketplace at the command line From 03940f5fef4815c823d1c6066444e589365fec5a Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Tue, 29 Oct 2024 02:22:37 +1100 Subject: [PATCH 12/42] chore: ensure `make gen` runs on CI when docs are updated (#15252) https://github.com/coder/coder/pull/15203 was merged with a failing `make gen`, as it only updated the docs. This makes it so this can't happen again. 
The capitalization of the Go type used in the auto-generated docs (`codersdk.OAuth2GithubConfig`) wasn't updated as it would technically be a breaking change for the sdk. --- .github/workflows/ci.yaml | 2 +- docs/reference/api/schemas.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fa5164b91caa4..dbfe10242ff3e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -234,7 +234,7 @@ jobs: timeout-minutes: 8 runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} needs: changes - if: needs.changes.outputs.docs-only == 'false' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' + if: needs.changes.outputs.docs-only == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 121620fc98fd6..f4e683305029b 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -3605,9 +3605,9 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | Name | Type | Required | Restrictions | Description | | -------- | ---------------------------------------------------------- | -------- | ------------ | ----------- | -| `github` | [codersdk.OAuth2GitHubConfig](#codersdkoauth2githubconfig) | false | | | +| `github` | [codersdk.OAuth2GithubConfig](#codersdkoauth2githubconfig) | false | | | -## codersdk.OAuth2GitHubConfig +## codersdk.OAuth2GithubConfig ```json { From 95a348ecc7df7ba9d2c5fea7fb93639a1ef3b17c Mon Sep 17 00:00:00 2001 From: Vincent Vielle Date: Mon, 28 Oct 2024 16:53:20 +0100 Subject: [PATCH 13/42] fix(coderd): improve use case handling in notifier for appearance fetchers (#15242) Fixing #15241 & add tests. 
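The behavior being fixed, as a minimal standalone sketch (not the real implementation): a missing row or an empty stored value should fall back to a package default, while any other error still propagates. The constant name and "Coder" value below are stand-ins for the package's real `notificationsDefaultAppName`; the actual change and its tests follow.

```go
package main

import (
	"database/sql"
	"errors"
	"fmt"
)

// Stand-in for the package-level default (the real constant lives in
// coderd/notifications as notificationsDefaultAppName).
const defaultAppName = "Coder"

// fetchAppName mirrors the intended behavior: a missing row or an empty
// stored value falls back to the default; any other error is returned.
func fetchAppName(get func() (string, error)) (string, error) {
	name, err := get()
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return defaultAppName, nil
		}
		return "", fmt.Errorf("get application name: %w", err)
	}
	if name == "" {
		return defaultAppName, nil
	}
	return name, nil
}

func main() {
	// An empty stored value now resolves to the default instead of "".
	name, _ := fetchAppName(func() (string, error) { return "", nil })
	fmt.Println(name) // "Coder"
}
```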
--- coderd/notifications/fetcher.go | 8 + coderd/notifications/fetcher_internal_test.go | 231 ++++++++++++++++++ 2 files changed, 239 insertions(+) create mode 100644 coderd/notifications/fetcher_internal_test.go diff --git a/coderd/notifications/fetcher.go b/coderd/notifications/fetcher.go index a579275d127bf..0688b88907981 100644 --- a/coderd/notifications/fetcher.go +++ b/coderd/notifications/fetcher.go @@ -38,6 +38,10 @@ func (n *notifier) fetchAppName(ctx context.Context) (string, error) { } return "", xerrors.Errorf("get application name: %w", err) } + + if appName == "" { + appName = notificationsDefaultAppName + } return appName, nil } @@ -49,5 +53,9 @@ func (n *notifier) fetchLogoURL(ctx context.Context) (string, error) { } return "", xerrors.Errorf("get logo URL: %w", err) } + + if logoURL == "" { + logoURL = notificationsDefaultLogoURL + } return logoURL, nil } diff --git a/coderd/notifications/fetcher_internal_test.go b/coderd/notifications/fetcher_internal_test.go new file mode 100644 index 0000000000000..a8d0149c883b8 --- /dev/null +++ b/coderd/notifications/fetcher_internal_test.go @@ -0,0 +1,231 @@ +package notifications + +import ( + "context" + "database/sql" + "testing" + "text/template" + + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/database/dbmock" +) + +func TestNotifier_FetchHelpers(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("https://example.com/logo.png", nil) + + ctx := context.Background() + helpers, err := n.fetchHelpers(ctx) + require.NoError(t, err) + + appName, ok := helpers["app_name"].(func() string) + require.True(t, ok) + require.Equal(t, "ACME Inc.", appName()) + + logoURL, ok := helpers["logo_url"].(func() string) + require.True(t, ok) + require.Equal(t, "https://example.com/logo.png", logoURL()) + }) + + t.Run("failed to fetch app name", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchHelpers(ctx) + require.Error(t, err) + require.ErrorContains(t, err, "get application name") + }) + + t.Run("failed to fetch logo URL", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + helpers: template.FuncMap{}, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchHelpers(ctx) + require.ErrorContains(t, err, "get logo URL") + }) +} + +func TestNotifier_FetchAppName(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("ACME Inc.", nil) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, "ACME 
Inc.", appName) + }) + + t.Run("No rows", func(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", sql.ErrNoRows) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultAppName, appName) + }) + + t.Run("Empty string", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", nil) + + ctx := context.Background() + appName, err := n.fetchAppName(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultAppName, appName) + }) + + t.Run("internal error", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetApplicationName(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchAppName(ctx) + require.Error(t, err) + }) +} + +func TestNotifier_FetchLogoURL(t *testing.T) { + t.Parallel() + + t.Run("ok", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("https://example.com/logo.png", nil) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, "https://example.com/logo.png", logoURL) + }) + + t.Run("No rows", func(t *testing.T) { + t.Parallel() + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", sql.ErrNoRows) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultLogoURL, logoURL) + }) + + t.Run("Empty string", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", nil) + + ctx := context.Background() + logoURL, err := n.fetchLogoURL(ctx) + require.NoError(t, err) + require.Equal(t, notificationsDefaultLogoURL, logoURL) + }) + + t.Run("internal error", func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + dbmock := dbmock.NewMockStore(ctrl) + + n := ¬ifier{ + store: dbmock, + } + + dbmock.EXPECT().GetLogoURL(gomock.Any()).Return("", xerrors.New("internal error")) + + ctx := context.Background() + _, err := n.fetchLogoURL(ctx) + require.Error(t, err) + }) +} From 1d33990e78983eb2d4d199b4830a6f67246a3f09 Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Tue, 29 Oct 2024 03:46:18 +1100 Subject: [PATCH 14/42] fix: ensure `make gen` runs on any changes (#15253) Previously, `make gen` ran on CI whenever a non-docs change was made. Based off the problem described in #15252, it sounds like CI should *always* be running `gen`. 
(Because I broke it, currently PR `gen` is getting skipped unless the `ci` category is updated) --- .github/workflows/ci.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dbfe10242ff3e..e2b95c7699757 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -233,8 +233,7 @@ jobs: gen: timeout-minutes: 8 runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} - needs: changes - if: needs.changes.outputs.docs-only == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' + if: always() steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 From cdd40fb29271f25fc9f47b9e73885024c33489e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 21:59:30 +0500 Subject: [PATCH 15/42] ci: bump the github-actions group with 2 updates (#15245) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 2 +- .github/workflows/scorecard.yml | 2 +- .github/workflows/security.yaml | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e2b95c7699757..e6d105d8890f4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -197,7 +197,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@6802cc60d4e7f78b9d5454f6cf3935c042d5e1e3 # v1.26.0 + uses: crate-ci/typos@0d9e0c2c1bd7f770f6eb90f87780848ca02fc12c # v1.26.8 with: config: .github/workflows/typos.toml diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 5913c0349e99a..77a8d36a6a6f3 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index 5ae6de7b2fe7d..f71119afb22d7 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -37,7 +37,7 @@ jobs: uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/init@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: languages: go, javascript @@ -47,7 +47,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/analyze@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 - name: Send Slack notification on failure if: ${{ failure() }} @@ -132,7 +132,7 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@f779452ac5af1c261dce0346a8f964149f49322b # v3.26.13 + uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 with: sarif_file: trivy-results.sarif category: "Trivy" From 1636124ed171615b87ae9c4c252357cf8cb32e7a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 17:11:31 +0000 Subject: [PATCH 16/42] chore: bump github.com/fatih/color from 1.17.0 to 1.18.0 (#15248) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 2a3f6477480fe..724a2125587ca 100644 --- a/go.mod +++ b/go.mod @@ -98,7 +98,7 @@ require ( github.com/dave/dst v0.27.2 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc github.com/elastic/go-sysinfo v1.14.0 - github.com/fatih/color v1.17.0 + github.com/fatih/color v1.18.0 github.com/fatih/structs v1.1.0 github.com/fatih/structtag v1.2.0 github.com/fergusstrange/embedded-postgres v1.29.0 diff --git a/go.sum b/go.sum index c0af699d2fb08..baa1eceddadf4 100644 --- a/go.sum +++ b/go.sum @@ -309,8 +309,8 @@ github.com/evanw/esbuild v0.23.1 h1:ociewhY6arjTarKLdrXfDTgy25oxhTZmzP8pfuBTfTA= github.com/evanw/esbuild v0.23.1/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= -github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= From ecb22461bbb97b6140e7e9fa1170c2f0645043df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 17:13:51 +0000 Subject: [PATCH 17/42] chore: bump 
github.com/gohugoio/hugo from 0.134.1 to 0.136.5 (#15247) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 32 ++++++++++++++++---------------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/go.mod b/go.mod index 724a2125587ca..bfc9228fb6337 100644 --- a/go.mod +++ b/go.mod @@ -114,7 +114,7 @@ require ( github.com/go-ping/ping v1.1.0 github.com/go-playground/validator/v10 v10.22.0 github.com/gofrs/flock v0.12.0 - github.com/gohugoio/hugo v0.134.1 + github.com/gohugoio/hugo v0.136.5 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/golang-migrate/migrate/v4 v4.18.1 github.com/google/go-cmp v0.6.0 @@ -422,7 +422,7 @@ require ( github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/goldmark v1.7.4 // indirect - github.com/yuin/goldmark-emoji v1.0.3 // indirect + github.com/yuin/goldmark-emoji v1.0.4 // indirect github.com/zclconf/go-cty v1.15.0 github.com/zeebo/errs v1.3.0 // indirect go.opencensus.io v0.24.0 // indirect diff --git a/go.sum b/go.sum index baa1eceddadf4..e00fe9c715490 100644 --- a/go.sum +++ b/go.sum @@ -305,8 +305,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= -github.com/evanw/esbuild v0.23.1 h1:ociewhY6arjTarKLdrXfDTgy25oxhTZmzP8pfuBTfTA= -github.com/evanw/esbuild v0.23.1/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= +github.com/evanw/esbuild v0.24.0 h1:GZ78naTLp7FKr+K7eNuM/SLs5maeiHYRPsTg6kmdsSE= +github.com/evanw/esbuild v0.24.0/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= @@ -415,8 +415,8 @@ github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/Oq github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10= github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc= github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/gobuffalo/flect v1.0.2 h1:eqjPGSo2WmjgY2XlpGwo2NXgL3RucAKo4k4qQMNA5sA= -github.com/gobuffalo/flect v1.0.2/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= +github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= +github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= @@ -442,8 +442,8 @@ github.com/gohugoio/hashstructure v0.1.0 h1:kBSTMLMyTXbrJVAxaKI+wv30MMJJxn9Q8kfQ github.com/gohugoio/hashstructure v0.1.0/go.mod h1:8ohPTAfQLTs2WdzB6k9etmQYclDUeNsIHGPAFejbsEA= github.com/gohugoio/httpcache v0.7.0 h1:ukPnn04Rgvx48JIinZvZetBfHaWE7I01JR2Q2RrQ3Vs= 
github.com/gohugoio/httpcache v0.7.0/go.mod h1:fMlPrdY/vVJhAriLZnrF5QpN3BNAcoBClgAyQd+lGFI= -github.com/gohugoio/hugo v0.134.1 h1:tLFRqDJuAlifwXispNvIHh6K3CT7ughxbBxzfUTStXY= -github.com/gohugoio/hugo v0.134.1/go.mod h1:/1gnGxlWfAzQarxcQ+tMvKw4e/IMBwy0DFbRxORwOtY= +github.com/gohugoio/hugo v0.136.5 h1:1IEDb0jWamc+LL/2dwDzdsGW67d5BxGcvu3gBkg7KQc= +github.com/gohugoio/hugo v0.136.5/go.mod h1:SarsIX7a9RqYY4VbDqIFrqSt57dIst+B1XKh+Q/lC7w= github.com/gohugoio/hugo-goldmark-extensions/extras v0.2.0 h1:MNdY6hYCTQEekY0oAfsxWZU1CDt6iH+tMLgyMJQh/sg= github.com/gohugoio/hugo-goldmark-extensions/extras v0.2.0/go.mod h1:oBdBVuiZ0fv9xd8xflUgt53QxW5jOCb1S+xntcN4SKo= github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.3.0 h1:7PY5PIJ2mck7v6R52yCFvvYHvsPMEbulgRviw3I9lP4= @@ -533,8 +533,8 @@ github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aN github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0= github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= -github.com/hairyhenderson/go-codeowners v0.5.0 h1:dpQB+hVHiRc2VVvc2BHxkuM+tmu9Qej/as3apqUbsWc= -github.com/hairyhenderson/go-codeowners v0.5.0/go.mod h1:R3uW1OQXEj2Gu6/OvZ7bt6hr0qdkLvUWPiqNaWnexpo= +github.com/hairyhenderson/go-codeowners v0.6.0 h1:cRCtmNf9Ni1GIeiAAlHX5IEEB2gr61813Kx5JmXxAAk= +github.com/hairyhenderson/go-codeowners v0.6.0/go.mod h1:RFWbGcjlXhRKNezt7AQHmJucY0alk4osN0+RKOsIAa8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -850,8 +850,8 @@ github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= @@ -925,8 +925,8 @@ github.com/tdewolff/parse/v2 v2.7.15/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= -github.com/tetratelabs/wazero v1.8.0 h1:iEKu0d4c2Pd+QSRieYbnQC9yiFlMS9D+Jr0LsRmcF4g= -github.com/tetratelabs/wazero v1.8.0/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs= +github.com/tetratelabs/wazero v1.8.1 h1:NrcgVbWfkWvVc4UtT4LRLDf91PsOzDzefMdwhLfA550= 
+github.com/tetratelabs/wazero v1.8.1/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs= github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= @@ -994,8 +994,8 @@ github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5t github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= github.com/yuin/goldmark v1.7.4 h1:BDXOHExt+A7gwPCJgPIIq7ENvceR7we7rOS9TNoLZeg= github.com/yuin/goldmark v1.7.4/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= -github.com/yuin/goldmark-emoji v1.0.3 h1:aLRkLHOuBR2czCY4R8olwMjID+tENfhyFDMCRhbIQY4= -github.com/yuin/goldmark-emoji v1.0.3/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U= +github.com/yuin/goldmark-emoji v1.0.4 h1:vCwMkPZSNefSUnOW2ZKRUjBSD5Ok3W78IXhGxxAEF90= +github.com/yuin/goldmark-emoji v1.0.4/go.mod h1:tTkZEbwu5wkPmgTcitqddVxY9osFZiavD+r4AzQrh1U= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.15.0 h1:tTCRWxsexYUmtt/wVxgDClUe+uQusuI443uL6e+5sXQ= github.com/zclconf/go-cty v1.15.0/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= @@ -1067,8 +1067,8 @@ golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI= golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= -golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ= -golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys= +golang.org/x/image v0.21.0 h1:c5qV36ajHpdj4Qi0GnE0jUc/yuo33OLFaa0d+crTD5s= +golang.org/x/image v0.21.0/go.mod h1:vUbsLavqK/W303ZroQQVKQ+Af3Yl6Uz1Ppu5J/cLz78= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= From 7cb20d7b2625286de792c307cf60301f2cccedac Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Mon, 28 Oct 2024 13:15:29 -0400 Subject: [PATCH 18/42] chore: name unlabeled db transaction metrics (#15251) --- coderd/database/dbmetrics/dbmetrics.go | 9 +++++++-- coderd/database/dbmetrics/dbmetrics_test.go | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 8708787f57dc7..b0309f9f2e2eb 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -74,6 +74,11 @@ func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOpt options = database.DefaultTXOptions() } + if options.TxIdentifier == "" { + // empty strings are hard to deal with in grafana + options.TxIdentifier = "unlabeled" + } + start := time.Now() err := m.Store.InTx(f, options) dur := time.Since(start) @@ -82,13 +87,13 @@ func (m metricsStore) InTx(f func(database.Store) error, options *database.TxOpt // So IDs should be used sparingly to prevent too much bloat. 
m.txDuration.With(prometheus.Labels{ "success": strconv.FormatBool(err == nil), - "tx_id": options.TxIdentifier, // Can be empty string for unlabeled + "tx_id": options.TxIdentifier, }).Observe(dur.Seconds()) m.txRetries.With(prometheus.Labels{ "success": strconv.FormatBool(err == nil), "retries": strconv.FormatInt(int64(options.ExecutionCount()-1), 10), - "tx_id": options.TxIdentifier, // Can be empty string for unlabeled + "tx_id": options.TxIdentifier, }).Inc() // Log all serializable transactions that are retried. diff --git a/coderd/database/dbmetrics/dbmetrics_test.go b/coderd/database/dbmetrics/dbmetrics_test.go index 7eed7035ee281..bd6566d054aae 100644 --- a/coderd/database/dbmetrics/dbmetrics_test.go +++ b/coderd/database/dbmetrics/dbmetrics_test.go @@ -22,7 +22,7 @@ func TestInTxMetrics(t *testing.T) { successLabels := prometheus.Labels{ "success": "true", - "tx_id": "", + "tx_id": "unlabeled", } const inTxHistMetricName = "coderd_db_tx_duration_seconds" const inTxCountMetricName = "coderd_db_tx_executions_count" From 516ba9e28e973f80e1a03dcf7a3ee410f4d790d0 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Mon, 28 Oct 2024 15:09:43 -0500 Subject: [PATCH 19/42] chore: update Go to 1.22.8 (#15255) --- .github/actions/setup-go/action.yaml | 2 +- flake.lock | 12 ++++++------ flake.nix | 2 +- go.mod | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index d4777e32a9bdf..2fa5c7dcfa9de 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.22.6" + default: "1.22.8" runs: using: "composite" steps: diff --git a/flake.lock b/flake.lock index 4c7c29d41aa79..b492e1dc9d04c 100644 --- a/flake.lock +++ b/flake.lock @@ -29,11 +29,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1726560853, + "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a", "type": "github" }, "original": { @@ -44,11 +44,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1720957393, - "narHash": "sha256-oedh2RwpjEa+TNxhg5Je9Ch6d3W1NKi7DbRO1ziHemA=", + "lastModified": 1729880355, + "narHash": "sha256-RP+OQ6koQQLX5nw0NmcDrzvGL8HDLnyXt/jHhL1jwjM=", "owner": "nixos", "repo": "nixpkgs", - "rev": "693bc46d169f5af9c992095736e82c3488bf7dbb", + "rev": "18536bf04cd71abd345f9579158841376fdd0c5a", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index f2d138c2b3aa7..e943ecc99df48 100644 --- a/flake.nix +++ b/flake.nix @@ -138,7 +138,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! - vendorHash = "sha256-kPXRp7l05iJd4IdvQeOFOgg2UNzBcloy3tA9Meep9VI="; + vendorHash = "sha256-Tsajkkp+NMjYRCpRX5HlSy/sCSpuABIGDM1jeavVe+w="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index bfc9228fb6337..4bb08217de645 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coder/coder/v2 -go 1.22.6 +go 1.22.8 // Required until a v3 of chroma is created to lazily initialize all XML files. 
// None of our dependencies seem to use the registries anyways, so this From 074faec7d7f4486618dee3dd611a75c372e95d6f Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Mon, 28 Oct 2024 15:24:57 -0500 Subject: [PATCH 20/42] chore: update Terraform to 1.9.8 (#15256) --- .github/actions/setup-tf/action.yaml | 2 +- docs/install/offline.md | 2 +- dogfood/contents/Dockerfile | 4 +-- install.sh | 2 +- provisioner/terraform/install.go | 2 +- .../calling-module/calling-module.tfplan.json | 4 +-- .../calling-module.tfstate.json | 10 +++---- .../chaining-resources.tfplan.json | 4 +-- .../chaining-resources.tfstate.json | 10 +++---- .../conflicting-resources.tfplan.json | 4 +-- .../conflicting-resources.tfstate.json | 10 +++---- .../display-apps-disabled.tfplan.json | 4 +-- .../display-apps-disabled.tfstate.json | 8 +++--- .../display-apps/display-apps.tfplan.json | 4 +-- .../display-apps/display-apps.tfstate.json | 8 +++--- .../external-auth-providers.tfplan.json | 6 ++-- .../external-auth-providers.tfstate.json | 8 +++--- provisioner/terraform/testdata/generate.sh | 5 ++++ .../git-auth-providers.tfplan.json | 6 ++-- .../git-auth-providers.tfstate.json | 8 +++--- .../instance-id/instance-id.tfplan.json | 4 +-- .../instance-id/instance-id.tfstate.json | 12 ++++---- .../mapped-apps/mapped-apps.tfplan.json | 4 +-- .../mapped-apps/mapped-apps.tfstate.json | 16 +++++------ .../multiple-agents-multiple-apps.tfplan.json | 8 +++--- ...multiple-agents-multiple-apps.tfstate.json | 26 ++++++++--------- .../multiple-agents-multiple-envs.tfplan.json | 8 +++--- ...multiple-agents-multiple-envs.tfstate.json | 26 ++++++++--------- ...ltiple-agents-multiple-scripts.tfplan.json | 8 +++--- ...tiple-agents-multiple-scripts.tfstate.json | 26 ++++++++--------- .../multiple-agents.tfplan.json | 4 +-- .../multiple-agents.tfstate.json | 20 ++++++------- .../multiple-apps/multiple-apps.tfplan.json | 4 +-- .../multiple-apps/multiple-apps.tfstate.json | 20 ++++++------- .../resource-metadata-duplicate.tfplan.json | 4 +-- .../resource-metadata-duplicate.tfstate.json | 16 +++++------ .../resource-metadata.tfplan.json | 4 +-- .../resource-metadata.tfstate.json | 12 ++++---- .../rich-parameters-order.tfplan.json | 10 +++---- .../rich-parameters-order.tfstate.json | 12 ++++---- .../rich-parameters-validation.tfplan.json | 18 ++++++------ .../rich-parameters-validation.tfstate.json | 20 ++++++------- .../rich-parameters.tfplan.json | 26 ++++++++--------- .../rich-parameters.tfstate.json | 28 +++++++++---------- provisioner/terraform/testdata/version.txt | 2 +- scripts/Dockerfile.base | 2 +- 46 files changed, 228 insertions(+), 223 deletions(-) diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index 12ee87f5a5c9f..c52f1138e03ca 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2 with: - terraform_version: 1.9.2 + terraform_version: 1.9.8 terraform_wrapper: false diff --git a/docs/install/offline.md b/docs/install/offline.md index 5a06388a992ee..6a4aae1af0daa 100644 --- a/docs/install/offline.md +++ b/docs/install/offline.md @@ -54,7 +54,7 @@ RUN mkdir -p /opt/terraform # The below step is optional if you wish to keep the existing version. # See https://github.com/coder/coder/blob/main/provisioner/terraform/install.go#L23-L24 # for supported Terraform versions. 
-ARG TERRAFORM_VERSION=1.9.2 +ARG TERRAFORM_VERSION=1.9.8 RUN apk update && \ apk del terraform && \ curl -LOs https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip \ diff --git a/dogfood/contents/Dockerfile b/dogfood/contents/Dockerfile index 059b76dcf0d68..bef5bccbaa423 100644 --- a/dogfood/contents/Dockerfile +++ b/dogfood/contents/Dockerfile @@ -189,9 +189,9 @@ RUN apt-get update --quiet && apt-get install --yes \ # Configure FIPS-compliant policies update-crypto-policies --set FIPS -# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.9.2. +# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.9.8. # Installing the same version here to match. -RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.2/terraform_1.9.2_linux_amd64.zip" && \ +RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.8/terraform_1.9.8_linux_amd64.zip" && \ unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/install.sh b/install.sh index 8fbc4e58f000c..257576ae4d57a 100755 --- a/install.sh +++ b/install.sh @@ -250,7 +250,7 @@ EOF main() { MAINLINE=1 STABLE=0 - TERRAFORM_VERSION="1.9.2" + TERRAFORM_VERSION="1.9.8" if [ "${TRACE-}" ]; then set -x diff --git a/provisioner/terraform/install.go b/provisioner/terraform/install.go index 8c96be6452a22..af425ec307724 100644 --- a/provisioner/terraform/install.go +++ b/provisioner/terraform/install.go @@ -20,7 +20,7 @@ var ( // when Terraform is not available on the system. // NOTE: Keep this in sync with the version in scripts/Dockerfile.base. // NOTE: Keep this in sync with the version in install.sh. - TerraformVersion = version.Must(version.NewVersion("1.9.2")) + TerraformVersion = version.Must(version.NewVersion("1.9.8")) minTerraformVersion = version.Must(version.NewVersion("1.1.0")) maxTerraformVersion = version.Must(version.NewVersion("1.9.9")) // use .9 to automatically allow patch releases diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json index 7f9464857f723..30bc360bb1940 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -260,7 +260,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:23Z", + "timestamp": "2024-10-28T20:07:49Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json index e30cc7513c92b..5ead2c6ace0d5 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "487890be-5e3c-4b06-a95b-a1d0a26f45c3", + "id": "04d66dc4-e25a-4f65-af6f-a9af6b907430", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, 
"startup_script_timeout": 300, - "token": "d50589ba-d3df-48e7-8fea-1ce92ea1e4e2", + "token": "10fbd765-b0cc-4d6f-b5de-e5a036b2cb4b", "troubleshooting_url": null }, "sensitive_values": { @@ -69,7 +69,7 @@ "outputs": { "script": "" }, - "random": "2660912917742059845" + "random": "7917595776755902204" }, "sensitive_values": { "inputs": {}, @@ -84,7 +84,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7409017517144186812", + "id": "2669991968036854745", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json index 01ebff551b463..38af6827019e7 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -205,7 +205,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:25Z", + "timestamp": "2024-10-28T20:07:50Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json index 109f1a816e7c8..0cee8567db250 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d700ca89-c521-478d-a430-833580e60941", + "id": "bcf4bae1-0870-48e9-8bb4-af2f652c4d54", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "1ffba24c-49cd-44ca-9855-08086c8f665f", + "token": "afe98f25-25a2-4892-b921-be04bcd71efc", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8823809151721173831", + "id": "6598177855275264799", "triggers": null }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6260983806355230616", + "id": "4663187895457986148", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json index b57638172a90d..3fe9f6c41fa9b 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -205,7 +205,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:26Z", + "timestamp": "2024-10-28T20:07:52Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json 
b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json index 4e138f7476405..ffd0690db2263 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d2d1c3a3-3315-47ed-a200-290455966190", + "id": "d047c7b6-b69e-4029-ab82-67468a0364f7", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e2076595-5316-47ec-a305-215f2f2a901c", + "token": "ceff37e3-52b9-4c80-af1b-1f9f99184590", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2887811124246756573", + "id": "3120105803817695206", "triggers": null }, "sensitive_values": {}, @@ -73,7 +73,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6007238228767050576", + "id": "2942451035046396496", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json index 8929284177be8..598d6f1735a84 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -204,7 +204,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:30Z", + "timestamp": "2024-10-28T20:07:55Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json index 4e56df9aa0d7b..7e9bdad7a02bb 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "51c9236c-7146-4e6b-85c2-b21361a6a359", + "id": "6ba13739-4a9c-456f-90cf-feba8f194853", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "0779e4d7-d9cf-4fa6-b3f7-92e6b83e52ca", + "token": "6e348a4c-ef00-40ab-9732-817fb828045c", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5801369723993496133", + "id": "3123606937441446452", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json index 0371606e527fc..3331a8f282c2b 100644 --- 
a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -204,7 +204,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:28Z", + "timestamp": "2024-10-28T20:07:54Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json index 49efca3f597ce..2b04222e751f2 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "ba5352ad-c833-442b-93c8-86e330a65192", + "id": "b7e8dd7a-34aa-41e2-977e-e38577ab2476", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "364b1d92-7a4f-475e-956a-90f4b2cfd2eb", + "token": "c6aeeb35-2766-4524-9818-687f7687831d", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3169937457521011358", + "id": "2407243137316459395", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json index b0cacf1cc79f0..5ba9e7b6af80f 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -228,7 +228,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:32Z", + "timestamp": "2024-10-28T20:07:57Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json index 5b0424973a840..875d8c9aaf439 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -54,7 +54,7 @@ } ], "env": null, - "id": "186d9525-cebc-476f-888a-4fb43d443938", + "id": "ec5d36c9-8690-4246-8ab3-2d85a3eacee6", "init_script": "", "login_before_ready": true, "metadata": [], @@ -66,7 +66,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "bdb44728-6909-4b52-ba86-ed6c058b5820", + "token": "78c55fa2-8e3c-4564-950d-e022c76cf05a", 
"troubleshooting_url": null }, "sensitive_values": { @@ -85,7 +85,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "848898101208151671", + "id": "455343782636271645", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/generate.sh b/provisioner/terraform/testdata/generate.sh index 04ac7bdef3c64..6cc79568582ee 100755 --- a/provisioner/terraform/testdata/generate.sh +++ b/provisioner/terraform/testdata/generate.sh @@ -19,6 +19,11 @@ for d in */; do continue fi + if [[ $name == "timings-aggregation" ]]; then + popd + continue + fi + terraform init -upgrade terraform plan -out terraform.tfplan terraform show -json ./terraform.tfplan | jq >"$name".tfplan.json diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json index 6ca82aedf141c..fba34f1cb5f4d 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -223,7 +223,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:34Z", + "timestamp": "2024-10-28T20:07:58Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json index 0087c31316519..3cf905c0a2948 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -52,7 +52,7 @@ } ], "env": null, - "id": "30e31610-1801-4837-957e-93bdbbc64ea3", + "id": "ffa1f524-0350-4891-868d-93cad369318a", "init_script": "", "login_before_ready": true, "metadata": [], @@ -64,7 +64,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "825b23c4-4243-4991-ac33-483ee4c50575", + "token": "8ba649af-b498-4f20-8055-b6a0b995837e", "troubleshooting_url": null }, "sensitive_values": { @@ -83,7 +83,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8892771970332750063", + "id": "7420557451345159984", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json index 4c22ab424aeb0..527a2fa05769d 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -225,7 +225,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:36Z", + "timestamp": "2024-10-28T20:08:00Z", "applyable": true, "complete": true, "errored": false diff --git 
a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json index 513fe487d181b..929d72365502c 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "da0d9673-d232-47f5-8869-ebd78444dde0", + "id": "0389c8a5-cc5c-485d-959c-8738bada65ff", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "df57eefc-83d5-444e-bbb5-47b5603156fa", + "token": "097b6128-8d60-4849-969b-03f0b463ac2c", "troubleshooting_url": null }, "sensitive_values": { @@ -57,8 +57,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "da0d9673-d232-47f5-8869-ebd78444dde0", - "id": "f4b242e6-f0c9-4cd4-adb0-06062ed8a1b7", + "agent_id": "0389c8a5-cc5c-485d-959c-8738bada65ff", + "id": "0ae6bb98-871c-4091-8098-d32f256d8c05", "instance_id": "example" }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7960015436996479556", + "id": "5569763710827889183", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json index 100d89f57a080..2151b4631647a 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -327,7 +327,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:38Z", + "timestamp": "2024-10-28T20:08:02Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json index 079f9c54fd818..9aaa7b352f518 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "fdd8d060-455d-471f-a025-72937e049ccd", + "token": "56420fd5-57e5-44e0-a264-53395b74505a", "troubleshooting_url": null }, "sensitive_values": { @@ -58,13 +58,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "agent_id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "command": null, "display_name": "app1", "external": false, "healthcheck": [], "icon": null, - "id": "65739639-3a6a-43ae-b95b-ba0d5ce07ce8", + "id": "e8163eb0-e56e-46e7-8848-8c6c250ce5b9", "name": null, "order": null, 
"relative_path": null, @@ -89,13 +89,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "ae638ce3-e9a0-4331-ad0d-b81d93975725", + "agent_id": "b3d3e1d7-1f1f-4abf-8475-2058f73f3437", "command": null, "display_name": "app2", "external": false, "healthcheck": [], "icon": null, - "id": "37f6ea39-3c4a-458d-9f0d-1c036bc5f1d7", + "id": "0971e625-7a23-4108-9765-78f7ad045b38", "name": null, "order": null, "relative_path": null, @@ -119,7 +119,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2485965605399142745", + "id": "60927265551659604", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json index 94cf2e79ec738..d8f5a4763518b 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -575,19 +575,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev1", + "resource": "coder_agent.dev2", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] } ], - "timestamp": "2024-07-15T17:48:43Z", + "timestamp": "2024-10-28T20:08:05Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json index db066d1078bbd..4a94e05baa29d 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-apps/multiple-agents-multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "9683bf91-8de9-419d-8c60-294a81995ad6", + "token": "c18d762d-062d-43d4-b7c2-98be546b39a6", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "27e6d9dd-6136-42ae-980a-eb299030111e", + "id": "e94994f2-cab5-4288-8ff3-a290c95e4e25", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "102429e0-a63a-4b75-9499-596c90f954ea", + "token": "c0757e3a-4be4-4643-b3ba-b27234169eb1", "troubleshooting_url": null }, "sensitive_values": { @@ -102,13 +102,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "agent_id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "command": null, "display_name": null, "external": false, "healthcheck": [], 
"icon": null, - "id": "37e01326-a44b-4042-b042-5b3bd26dff1d", + "id": "bf2b3c44-1b1d-49c5-9149-4f2f18590c60", "name": null, "order": null, "relative_path": null, @@ -132,7 +132,7 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "74d75dac-6a80-4cac-9153-3a387bde6824", + "agent_id": "571523c7-e7a3-420a-b65d-39d15f5f3267", "command": null, "display_name": null, "external": false, @@ -144,7 +144,7 @@ } ], "icon": null, - "id": "31576d00-cd93-452c-a385-ef91d8ebabc1", + "id": "580cf864-a64d-4430-98b7-fa37c44083f8", "name": null, "order": null, "relative_path": null, @@ -170,13 +170,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "27e6d9dd-6136-42ae-980a-eb299030111e", + "agent_id": "e94994f2-cab5-4288-8ff3-a290c95e4e25", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "c8bb967e-4a36-4ccb-89f6-93cabfba150d", + "id": "182dca7b-12ab-4c58-9424-23b7d61135a9", "name": null, "order": null, "relative_path": null, @@ -200,7 +200,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4919579386937214358", + "id": "3778543820798621894", "triggers": null }, "sensitive_values": {}, @@ -216,7 +216,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4338309449618140876", + "id": "1094622314762410115", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json index c3ecb1db00d44..4cb28ae592516 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -472,19 +472,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev1", + "resource": "coder_agent.dev2", "attribute": [ "id" ] } ], - "timestamp": "2024-07-15T17:48:46Z", + "timestamp": "2024-10-28T20:08:06Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json index a982897075c3a..f87b6f0a9eb56 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-envs/multiple-agents-multiple-envs.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d5849a8b-3f84-44d1-80df-d61af159490f", + "id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "1c5f00f4-f48b-4f0d-bd9b-5c97a63ea2d9", + "token": 
"1cfd79e3-3f9c-4d66-b7c2-42c385c26012", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "48ddd7f1-ab68-4247-9b8c-09ae1b93debc", + "id": "ca137ba9-45ce-44ff-8e30-59a86565fa7d", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "ffc286fe-0f27-46fb-bf0f-613f4e2943a4", + "token": "0d3aa4f8-025c-4044-8053-d077484355fb", "troubleshooting_url": null }, "sensitive_values": { @@ -102,8 +102,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d5849a8b-3f84-44d1-80df-d61af159490f", - "id": "88a1c662-5e5b-4da6-bb60-4e4f4311b9ca", + "agent_id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", + "id": "e3d37294-2407-4286-a519-7551b901ba54", "name": "ENV_1", "value": "Env 1" }, @@ -120,8 +120,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d5849a8b-3f84-44d1-80df-d61af159490f", - "id": "bbaea14d-a16b-4b1e-9feb-f445a2a08d14", + "agent_id": "702e7cd2-95a0-46cf-8ef7-c1dfbd3e56b9", + "id": "9451575b-da89-4297-a42d-4aaf0a23775d", "name": "ENV_2", "value": "Env 2" }, @@ -138,8 +138,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "48ddd7f1-ab68-4247-9b8c-09ae1b93debc", - "id": "d6bdb1d7-06cd-4802-a860-b5d7a31f7d7b", + "agent_id": "ca137ba9-45ce-44ff-8e30-59a86565fa7d", + "id": "948e3fb5-12a1-454b-b85e-d4dc1f01838f", "name": "ENV_3", "value": "Env 3" }, @@ -156,7 +156,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "1850797469207235208", + "id": "7502424400840788651", "triggers": null }, "sensitive_values": {}, @@ -172,7 +172,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "214998680720912111", + "id": "3916143681500058654", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json index 83d55b1e95056..ab14e49f02989 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -523,19 +523,19 @@ }, "relevant_attributes": [ { - "resource": "coder_agent.dev2", + "resource": "coder_agent.dev1", "attribute": [ "id" ] }, { - "resource": "coder_agent.dev1", + "resource": "coder_agent.dev2", "attribute": [ "id" ] } ], - "timestamp": "2024-07-15T17:48:49Z", + "timestamp": "2024-10-28T20:08:08Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json index 4fa235cb52eb5..37c4ef13ee6fb 100644 --- a/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json +++ 
b/provisioner/terraform/testdata/multiple-agents-multiple-scripts/multiple-agents-multiple-scripts.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "75b94908-e753-440a-af7d-2a7a97866360", + "token": "77b179b6-0e2d-4307-9ba0-98325fc96e37", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "b8cce9b4-6a56-43e1-a547-5526a05f2881", + "id": "86f7e422-1798-4de5-8209-69b023808241", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "14aa65f3-0e3f-4e86-bb86-5993c06526c1", + "token": "aa4ae02d-4084-4dff-951c-af10f78a98c2", "troubleshooting_url": null }, "sensitive_values": { @@ -102,11 +102,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "agent_id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "cron": null, "display_name": "Foobar Script 1", "icon": null, - "id": "13a60062-28d4-459c-8e53-729a45b4a75a", + "id": "eb1eb8f4-3a4a-4040-bd6a-0abce01d6330", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -127,11 +127,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "a46d73a8-3abc-4dab-84ae-1961772256ff", + "agent_id": "753eb8c0-e2b7-4cbc-b0ff-1370ce2e4022", "cron": null, "display_name": "Foobar Script 2", "icon": null, - "id": "c13a1cc1-dfb5-4fab-a8c9-cd65bafef3c0", + "id": "1de43abc-8416-4455-87ca-23fb425b4eeb", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -152,11 +152,11 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "b8cce9b4-6a56-43e1-a547-5526a05f2881", + "agent_id": "86f7e422-1798-4de5-8209-69b023808241", "cron": null, "display_name": "Foobar Script 3", "icon": null, - "id": "50d359c9-6fdd-4f29-8292-f547b4e22b32", + "id": "ede835f7-4018-464c-807d-7e07af7de9d3", "log_path": null, "run_on_start": true, "run_on_stop": false, @@ -177,7 +177,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6599800639836820524", + "id": "4207133259459553257", "triggers": null }, "sensitive_values": {}, @@ -193,7 +193,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7049016876762601534", + "id": "5647997484430231619", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json index ecb4729f909b2..67da167932aa4 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -464,7 +464,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:40Z", + "timestamp": "2024-10-28T20:08:03Z", 
"applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json index 04bb862e4be54..cd8edc0ae29bc 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "a777f1dc-7e43-497d-bac5-56ad5a2d7f7e", + "id": "c76ed902-d4cb-4905-9961-4d58dda135f9", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "6df4262d-7ce5-41c7-b9ad-84df6d20070e", + "token": "f1aa99ea-570d-49cf-aef9-a4241e3cb023", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "2f29a1dd-04ad-4360-bada-51a73dc1d352", + "id": "1b037439-4eb3-408e-83da-28dc93645944", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": "non-blocking", "startup_script_timeout": 30, - "token": "52549a72-6199-4fab-beb1-27131129f94d", + "token": "20d4e89e-d6de-4eb7-8877-f9186d684aa5", "troubleshooting_url": null }, "sensitive_values": { @@ -116,7 +116,7 @@ } ], "env": null, - "id": "7df8745b-3cd4-4638-a637-f370fc17973d", + "id": "453b5404-8ea4-4197-8664-3638e6a012ca", "init_script": "", "login_before_ready": true, "metadata": [], @@ -128,7 +128,7 @@ "startup_script": null, "startup_script_behavior": "blocking", "startup_script_timeout": 300, - "token": "bf843f72-6965-4000-b1ec-02f158556f5e", + "token": "0355cb42-9da0-4bad-b2aa-74db1df76fef", "troubleshooting_url": "https://coder.com/troubleshoot" }, "sensitive_values": { @@ -161,7 +161,7 @@ } ], "env": null, - "id": "6a756f61-0050-4372-b458-35d38b595a79", + "id": "c0a68e9b-5b29-4d95-b664-5ac71dd633cf", "init_script": "", "login_before_ready": false, "metadata": [], @@ -173,7 +173,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "4ed633b5-eff0-48ac-8089-57ffeff02bdc", + "token": "34b78439-5d6e-431b-b06c-339f97a1e9cf", "troubleshooting_url": null }, "sensitive_values": { @@ -192,7 +192,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7329660528883337331", + "id": "5109814714394194897", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json index dd6f3b247d4b9..b156c3b5068b6 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -446,7 +446,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:50Z", + "timestamp": "2024-10-28T20:08:10Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json index b172a050bebe3..d3fc254bf40b0 100644 --- 
a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "eb7478f3-26ff-4c6d-b307-7c5cb78c692d", + "token": "30533677-f04a-493b-b6cb-314d9abf7769", "troubleshooting_url": null }, "sensitive_values": { @@ -57,13 +57,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "ae194f56-c14c-4d04-a05b-7cd9c4a95dbe", + "id": "537e9069-492b-4721-96dd-cffba275ecd9", "name": null, "order": null, "relative_path": null, @@ -87,7 +87,7 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, @@ -99,7 +99,7 @@ } ], "icon": null, - "id": "8254828f-8582-497a-8f9d-c2bc2b3495cc", + "id": "3a4c78a0-7ea3-44aa-9ea8-4e08e387b4b6", "name": null, "order": null, "relative_path": null, @@ -125,13 +125,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "af75acda-ef6d-4f1f-97e3-31133118b1b9", + "agent_id": "b3ea3cb0-176c-4642-9bf5-cfa72e0782cc", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "ec4dea85-191b-4543-b19c-90f298c514fb", + "id": "23555681-0ecb-4962-8e85-367d3a9d0228", "name": null, "order": null, "relative_path": null, @@ -155,7 +155,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7610101534452317567", + "id": "2905101599123333983", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json index e2ccff05866b0..3b7881701038c 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -432,7 +432,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:54Z", + "timestamp": "2024-10-28T20:08:13Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json index 569f348ec6c3a..170630d0e3103 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json +++ 
b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "8a6eab74-3f83-4551-ab7c-6e2fbae32099", + "id": "0cbc2449-fbaa-447a-8487-6c47367af0be", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "c90854c9-a5a6-4794-9470-ef05bbc51491", + "token": "b03606cc-1ed3-4187-964d-389cf2ef223f", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "77c46f95-fee8-4587-b6db-5da8d7d562a8", + "id": "d6c33b98-addd-4d97-8659-405350bc06c1", "item": [ { "is_null": false, @@ -86,7 +86,7 @@ "value": "" } ], - "resource_id": "5995054412151645025" + "resource_id": "5673227143105805783" }, "sensitive_values": { "item": [ @@ -110,7 +110,7 @@ "daily_cost": 20, "hide": true, "icon": "/icon/server.svg", - "id": "20faad5d-8891-4ec8-8a94-46967240127f", + "id": "76594f08-2261-4114-a61f-e07107a86f89", "item": [ { "is_null": false, @@ -119,7 +119,7 @@ "value": "world" } ], - "resource_id": "5995054412151645025" + "resource_id": "5673227143105805783" }, "sensitive_values": { "item": [ @@ -139,7 +139,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5995054412151645025", + "id": "5673227143105805783", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json index 09639c0768fe1..f9c24830c6ef3 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -384,7 +384,7 @@ ] } ], - "timestamp": "2024-07-15T17:48:52Z", + "timestamp": "2024-10-28T20:08:11Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json index 3efef1ac379e8..a41aff216b11c 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "cbffc18b-d2e5-4826-b202-5b7158917307", + "id": "3bcbc547-b434-4dbd-b5ed-551edfba1b5c", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "3ccecdc6-6947-44f8-bede-f3c8ee8f7afe", + "token": "2d25fcc3-a355-4e92-98c6-ab780894ffee", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "bee16745-291f-4209-937f-e8198beefbb2", + "id": "d9ce721c-dff3-44fd-92d1-155f37c84a56", "item": [ { "is_null": false, @@ -98,7 +98,7 @@ 
"value": "squirrel" } ], - "resource_id": "23022633153502273" + "resource_id": "4099397325680267994" }, "sensitive_values": { "item": [ @@ -121,7 +121,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "23022633153502273", + "id": "4099397325680267994", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json index 5a9754c6eb8ef..72120dfaabeec 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2505d55b-a9f4-4aaa-90fd-b4f36079e2fd", + "id": "e8805d7c-1636-4416-9520-b83234d68ddc", "mutable": false, "name": "Example", "option": null, @@ -163,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "ad73ddbc-2c11-45a1-913c-b73cdd3b9b0f", + "id": "df43829a-49ce-4911-97ef-2fca78456c9f", "mutable": false, "name": "Sample", "option": null, @@ -269,7 +269,7 @@ ] } }, - "timestamp": "2024-07-15T17:48:58Z", + "timestamp": "2024-10-28T20:08:17Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json index b3fed19aaa61c..1d675d685a37c 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "84da03d3-81af-43bd-bdc0-6fc2f34e3f4b", + "id": "81ada233-3a30-49d3-a56f-aca92f19c411", "mutable": false, "name": "Example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "eeb97e5f-1186-422f-b6db-95b3d4257636", + "id": "4dc1049f-0d54-408a-a412-95629ae5cd84", "mutable": false, "name": "Sample", "option": null, @@ -80,7 +80,7 @@ } ], "env": null, - "id": "ba82266f-8b63-4a31-9158-94b5ca51ceeb", + "id": "86cc4d6e-23b3-4632-9bc9-d3a321e8b906", "init_script": "", "login_before_ready": true, "metadata": [], @@ -92,7 +92,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e8177f3a-5ce1-41ea-b709-cc8c3624c298", + "token": "0c3e7639-bafc-4e62-8e38-cb4e1b44e3f3", "troubleshooting_url": null }, "sensitive_values": { @@ -111,7 +111,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8146132740199712825", + "id": "2501594036325466407", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json 
b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json index fb308658d78f1..66153605ee4a0 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "0c018669-159f-4444-a3ca-3f80c9bb3ce3", + "id": "df8ad066-047d-434d-baa3-e19517ee7395", "mutable": true, "name": "number_example", "option": null, @@ -163,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "78ced97f-753b-45e1-b176-5f7f37956363", + "id": "7d9658aa-ff69-477a-9063-e9fd49fd9a9b", "mutable": false, "name": "number_example_max", "option": null, @@ -202,7 +202,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "df27d2cd-6feb-4106-bc0d-dacb33da8547", + "id": "bd6fcaac-db7f-4c4d-a664-fe7f47fad28a", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -241,7 +241,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "35584863-347b-4dc0-8618-b2f7f0e42bbf", + "id": "8d42942d-5a10-43c9-a31d-d3fe9a7814e8", "mutable": false, "name": "number_example_min", "option": null, @@ -280,7 +280,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "cafe4351-a64b-481d-9a0d-e2c9cf057b25", + "id": "695301d0-8325-4685-824d-1ca9591689e3", "mutable": false, "name": "number_example_min_max", "option": null, @@ -319,7 +319,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "41659f9c-8934-4763-8285-9ec401f5ef6b", + "id": "cd921934-d1b1-4370-8a73-2d43658ea877", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -551,7 +551,7 @@ ] } }, - "timestamp": "2024-07-15T17:49:00Z", + "timestamp": "2024-10-28T20:08:18Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json index 3e18e55b2a735..35b981c3a9b54 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "d82331f3-56ce-43f5-a6f6-d818c916ac7a", + "id": "e09e9110-2f11-4a45-bc9f-dc7a12834ef0", "mutable": true, "name": "number_example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "6ee08f4e-4200-4c4c-b606-7e7d4a6a5fdb", + "id": "7ba6324d-d8fd-43b8-91d2-d970a424db8b", "mutable": false, "name": "number_example_max", "option": null, @@ -83,7 +83,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "f879ade0-27ba-45c8-84dd-d2393a7cdad0", + "id": "64e12007-8479-43bf-956b-86fe7ae73066", "mutable": false, "name": 
"number_example_max_zero", "option": null, @@ -122,7 +122,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "8d057664-79e1-4f0e-a24e-72b2ac5e3306", + "id": "32681b2b-682f-4a5f-9aa6-c05be9d41a89", "mutable": false, "name": "number_example_min", "option": null, @@ -161,7 +161,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0249146a-ba5e-4d59-bbd2-48d1027ebb42", + "id": "03b67b89-0d35-449d-8997-f5ce4b7c1518", "mutable": false, "name": "number_example_min_max", "option": null, @@ -200,7 +200,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "edeb33bb-b8d4-4770-9c41-e0e94a4886af", + "id": "2201fc53-38c6-4a68-b3b9-4f6ef3390962", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -248,7 +248,7 @@ } ], "env": null, - "id": "7c672b0d-41f4-45ae-9596-9be1455505a9", + "id": "060ffd05-39a9-4fa3-81a3-7d9d8e655bf8", "init_script": "", "login_before_ready": true, "metadata": [], @@ -260,7 +260,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "4938f98a-bc70-4dae-8825-27d41ba34842", + "token": "58ed35b2-6124-4183-a493-40cb0174f4d2", "troubleshooting_url": null }, "sensitive_values": { @@ -279,7 +279,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8043802126847197223", + "id": "4610812354433374355", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json index a37148f2b4d24..1ec2927a40ad1 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "planned_values": { "root_module": { "resources": [ @@ -119,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -136,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "b0837593-03d9-4039-87d3-9170a6513751", + "id": "cbec5bff-b81a-4815-99c0-40c0629779fb", "mutable": false, "name": "Example", "option": [ @@ -180,7 +180,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "aff9e428-f431-4ca1-8c2f-3c1adf662ed7", + "id": "dd1c36b7-a961-4eb2-9687-c32b5ee54fbc", "mutable": false, "name": "number_example", "option": null, @@ -207,7 +207,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "15371ea5-9ffc-4672-8c7b-338eed974655", + "id": "f1bcac54-a58c-44b2-94f5-243a0b1492d3", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -246,7 +246,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2e77000c-d96f-4110-ad55-3a733fef768c", + "id": "79c76ac1-8e71-4872-9107-d7a9529f7dce", "mutable": false, "name": "number_example_min_max", "option": null, @@ -285,7 +285,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2c9f5877-7df8-42a8-9d34-20d7a74832e0", + "id": "da7a8aff-ffe3-402f-bf7e-b369ae04b041", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -324,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "d9eb4625-889c-4eb7-87d4-80644c5ee57a", + "id": "5fe2dad0-e11f-46f0-80ae-c0c3a29cd1fd", "mutable": false, "name": "Sample", "option": null, @@ -355,7 +355,7 
@@ "display_name": null, "ephemeral": false, "icon": null, - "id": "7549ee27-b944-46e8-89c7-66ce22285efc", + "id": "920f98a1-3a6f-4602-8c87-ebbbef0310c5", "mutable": true, "name": "First parameter from module", "option": null, @@ -382,7 +382,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "c5fd9f8a-f83f-450a-b93a-4f4267be580a", + "id": "f438d9ad-6c3e-44f3-95cd-1d423a9b09e5", "mutable": true, "name": "Second parameter from module", "option": null, @@ -414,7 +414,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "1b819f45-1451-45d8-bdf6-80c067be383b", + "id": "b2c53701-be53-4591-aacf-1c83f75bcf15", "mutable": true, "name": "First parameter from child module", "option": null, @@ -441,7 +441,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "103f609f-e7d4-4060-b9dc-cc59afbcc2ad", + "id": "038b18d4-d430-4703-886a-b7e10e01f856", "mutable": true, "name": "Second parameter from child module", "option": null, @@ -794,7 +794,7 @@ } } }, - "timestamp": "2024-07-15T17:48:56Z", + "timestamp": "2024-10-28T20:08:15Z", "applyable": true, "complete": true, "errored": false diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json index f3011a94e387c..1bfc1835dfcaf 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.9.2", + "terraform_version": "1.9.8", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "6de72459-12d0-493b-a6de-849e08a80231", + "id": "8586d419-7e61-4e67-b8df-d98d8ac7ffd3", "mutable": false, "name": "Example", "option": [ @@ -61,7 +61,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "4c531563-c935-41ad-8cca-f417c16e5278", + "id": "0cc54450-13a6-486c-b542-6e23a9f3596b", "mutable": false, "name": "number_example", "option": null, @@ -88,7 +88,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0c77e023-ebfd-4868-a25b-2f6b131c52a3", + "id": "0c0b913a-0bde-4b9e-8a70-06d9b6d38a26", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -127,7 +127,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "d5415c63-b007-4409-8715-8750fcd014c5", + "id": "37fd5372-2741-49dd-bf01-6ba29a24c9dd", "mutable": false, "name": "number_example_min_max", "option": null, @@ -166,7 +166,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "27846e1e-1ea4-463d-a0f1-2f06bd2767ff", + "id": "c0fd84ff-117f-442a-95f7-e8368ba7ce1d", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -205,7 +205,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "e0d43ce9-3377-48ab-8917-960a39fc78aa", + "id": "ab067ffc-99de-4705-97fe-16c713d2d115", "mutable": false, "name": "Sample", "option": null, @@ -241,7 +241,7 @@ } ], "env": null, - "id": "a84d968c-98b8-49e4-878f-8afbfcfcd058", + "id": "7daab302-d00e-48d4-878c-47afbe3a13bc", "init_script": "", "login_before_ready": true, "metadata": [], @@ -253,7 +253,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "494f0e2b-0727-4833-b824-f3c5ae5ec701", + "token": "e98c452d-cbe9-4ae1-8382-a986089dccb4", "troubleshooting_url": null }, "sensitive_values": { @@ -272,7 +272,7 @@ "provider_name": 
"registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6676147453513335498", + "id": "2355126481625628137", "triggers": null }, "sensitive_values": {}, @@ -297,7 +297,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "28bbdb1b-bbfd-448e-a90d-667372384184", + "id": "0978cc7c-f787-406c-a050-9272bbb52085", "mutable": true, "name": "First parameter from module", "option": null, @@ -324,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "edaafb64-16d1-4abc-9016-aa30d7ee3ed1", + "id": "cd01d7da-9f56-460d-b163-e88a0a9a5f67", "mutable": true, "name": "Second parameter from module", "option": null, @@ -356,7 +356,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "85b4aa9c-206a-4708-b12b-f80e8905d178", + "id": "528e845a-843b-48b3-a421-a22340726d5a", "mutable": true, "name": "First parameter from child module", "option": null, @@ -383,7 +383,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "913d7ffb-d406-4a2e-9368-106e0af12d34", + "id": "f486efbb-2fc6-4091-9eca-0088ac6cd3cc", "mutable": true, "name": "Second parameter from child module", "option": null, diff --git a/provisioner/terraform/testdata/version.txt b/provisioner/terraform/testdata/version.txt index 8fdcf3869464a..66beabb5795e7 100644 --- a/provisioner/terraform/testdata/version.txt +++ b/provisioner/terraform/testdata/version.txt @@ -1 +1 @@ -1.9.2 +1.9.8 diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 1ad37edf20360..33c9f551346a6 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -26,7 +26,7 @@ RUN apk add --no-cache \ # Terraform was disabled in the edge repo due to a build issue. # https://gitlab.alpinelinux.org/alpine/aports/-/commit/f3e263d94cfac02d594bef83790c280e045eba35 # Using wget for now. Note that busybox unzip doesn't support streaming. 
-RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.2/terraform_1.9.2_linux_${ARCH}.zip" && \ +RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.9.8/terraform_1.9.8_linux_${ARCH}.zip" && \ busybox unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ From fb4219f57da63287512d562691f38c0605d4b93f Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Mon, 28 Oct 2024 20:31:01 +0000 Subject: [PATCH 21/42] fix: parse int to correct bit size (#15257) --- coderd/cryptokeys/cache.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coderd/cryptokeys/cache.go b/coderd/cryptokeys/cache.go index 7777d5f75b942..43d673548ce06 100644 --- a/coderd/cryptokeys/cache.go +++ b/coderd/cryptokeys/cache.go @@ -163,7 +163,7 @@ func (c *cache) DecryptingKey(ctx context.Context, id string) (interface{}, erro return nil, ErrInvalidFeature } - seq, err := strconv.ParseInt(id, 10, 64) + seq, err := strconv.ParseInt(id, 10, 32) if err != nil { return nil, xerrors.Errorf("parse id: %w", err) } @@ -192,7 +192,7 @@ func (c *cache) VerifyingKey(ctx context.Context, id string) (interface{}, error return nil, ErrInvalidFeature } - seq, err := strconv.ParseInt(id, 10, 64) + seq, err := strconv.ParseInt(id, 10, 32) if err != nil { return nil, xerrors.Errorf("parse id: %w", err) } From 742413e14915ecfdbb1154e7b48a6df3c7a0d7cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 20:47:19 +0000 Subject: [PATCH 22/42] chore: bump google.golang.org/api from 0.202.0 to 0.203.0 (#15246) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 4bb08217de645..cf3b533b35674 100644 --- a/go.mod +++ b/go.mod @@ -185,7 +185,7 @@ require ( golang.org/x/text v0.19.0 golang.org/x/tools v0.26.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da - google.golang.org/api v0.202.0 + google.golang.org/api v0.203.0 google.golang.org/grpc v1.67.1 google.golang.org/protobuf v1.35.1 gopkg.in/DataDog/dd-trace-go.v1 v1.69.0 @@ -215,7 +215,7 @@ require ( ) require ( - cloud.google.com/go/auth v0.9.8 // indirect + cloud.google.com/go/auth v0.9.9 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect dario.cat/mergo v1.0.0 // indirect github.com/DataDog/go-libddwaf/v3 v3.4.0 // indirect diff --git a/go.sum b/go.sum index e00fe9c715490..771268286eebe 100644 --- a/go.sum +++ b/go.sum @@ -1,8 +1,8 @@ cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6 h1:KHblWIE/KHOwQ6lEbMZt6YpcGve2FEZ1sDtrW1Am5UI= cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6/go.mod h1:NaoTA7KwopCrnaSb0JXTC0PTp/O/Y83Lndnq0OEV3ZQ= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/auth v0.9.8 h1:+CSJ0Gw9iVeSENVCKJoLHhdUykDgXSc4Qn+gu2BRtR8= -cloud.google.com/go/auth v0.9.8/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= +cloud.google.com/go/auth v0.9.9 h1:BmtbpNQozo8ZwW2t7QJjnrQtdganSdmqeIBxHxNkEZQ= +cloud.google.com/go/auth v0.9.9/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= cloud.google.com/go/auth/oauth2adapt v0.2.4 
h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY= cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc= cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= @@ -1203,8 +1203,8 @@ golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 h1:CawjfCvY golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6/go.mod h1:3rxYc4HtVcSG9gVaTs2GEBdehh+sYPOwKtyUWEOTb80= golang.zx2c4.com/wireguard/windows v0.5.3 h1:On6j2Rpn3OEMXqBq00QEDC7bWSZrPIHKIus8eIuExIE= golang.zx2c4.com/wireguard/windows v0.5.3/go.mod h1:9TEe8TJmtwyQebdFwAkEWOPr3prrtqm+REGFifP60hI= -google.golang.org/api v0.202.0 h1:y1iuVHMqokQbimW79ZqPZWo4CiyFu6HcCYHwSNyzlfo= -google.golang.org/api v0.202.0/go.mod h1:3Jjeq7M/SFblTNCp7ES2xhq+WvGL0KeXI0joHQBfwTQ= +google.golang.org/api v0.203.0 h1:SrEeuwU3S11Wlscsn+LA1kb/Y5xT8uggJSkIhD08NAU= +google.golang.org/api v0.203.0/go.mod h1:BuOVyCSYEPwJb3npWvDnNmFI92f3GeRnHNkETneT3SI= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= From 971388762ce6f95eca026eb306c66e60d19bd219 Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Mon, 28 Oct 2024 15:56:51 -0500 Subject: [PATCH 23/42] chore(docs): change mentions of enterprise to premium (#15258) Matches our latest licensing verbiage. --- docs/admin/infrastructure/architecture.md | 4 ++-- docs/admin/infrastructure/scale-testing.md | 4 ++-- docs/admin/monitoring/notifications/index.md | 2 +- docs/admin/networking/index.md | 2 +- docs/admin/networking/port-forwarding.md | 6 +++--- docs/admin/security/audit-logs.md | 2 +- .../templates/extending-templates/process-logging.md | 2 +- docs/admin/templates/managing-templates/index.md | 2 +- docs/admin/templates/managing-templates/schedule.md | 8 ++++---- docs/admin/templates/template-permissions.md | 2 +- docs/tutorials/faqs.md | 4 ++-- docs/tutorials/index.md | 7 +++---- docs/user-guides/workspace-access/port-forwarding.md | 2 +- docs/user-guides/workspace-management.md | 2 +- docs/user-guides/workspace-scheduling.md | 4 ++-- 15 files changed, 26 insertions(+), 27 deletions(-) diff --git a/docs/admin/infrastructure/architecture.md b/docs/admin/infrastructure/architecture.md index 3c4e0b1511031..fb351e4da2d18 100644 --- a/docs/admin/infrastructure/architecture.md +++ b/docs/admin/infrastructure/architecture.md @@ -10,11 +10,11 @@ page describes possible deployments, challenges, and risks associated with them. ![Architecture Diagram](../../images/architecture-diagram.png) -## Enterprise +## Premium ![Single Region Architecture Diagram](../../images/architecture-single-region.png) -## Multi-Region Enterprise +## Multi-Region Premium ![Multi Region Architecture Diagram](../../images/architecture-multi-region.png) diff --git a/docs/admin/infrastructure/scale-testing.md b/docs/admin/infrastructure/scale-testing.md index 75d3f00b35f5d..c371f23fd5559 100644 --- a/docs/admin/infrastructure/scale-testing.md +++ b/docs/admin/infrastructure/scale-testing.md @@ -173,8 +173,8 @@ example, running 10 provisioner containers will allow 10 users to start workspaces at the same time. 
By default, the Coder server runs 3 built-in provisioner daemons, but the -_Enterprise_ Coder release allows for running external provisioners to separate -the load caused by workspace provisioning on the `coderd` nodes. +_Premium_ Coder release allows for running external provisioners to separate the +load caused by workspace provisioning on the `coderd` nodes. #### Scaling formula diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md index 48a1d95e0b412..a98fa0b3e8b48 100644 --- a/docs/admin/monitoring/notifications/index.md +++ b/docs/admin/monitoring/notifications/index.md @@ -76,7 +76,7 @@ can only be delivered to one method, and this method is configured globally with [`CODER_NOTIFICATIONS_METHOD`](../../../reference/cli/server.md#--notifications-method) (default: `smtp`). -Enterprise customers can configure which method to use for each of the supported +Premium customers can configure which method to use for each of the supported [Events](#workspace-events); see the [Preferences](#delivery-preferences-enterprise-premium) section below for more details. diff --git a/docs/admin/networking/index.md b/docs/admin/networking/index.md index d33a8534eacef..2e07a7e6e4ac8 100644 --- a/docs/admin/networking/index.md +++ b/docs/admin/networking/index.md @@ -173,7 +173,7 @@ $ coder server --derp-config-path derpmap.json The dashboard (and web apps opened through the dashboard) are served from the coder server, so they can only be geo-distributed with High Availability mode in -our Enterprise Edition. [Reach out to Sales](https://coder.com/contact) to learn +our Premium Edition. [Reach out to Sales](https://coder.com/contact) to learn more. ## Browser-only connections (enterprise) (premium) diff --git a/docs/admin/networking/port-forwarding.md b/docs/admin/networking/port-forwarding.md index a0db8715a01e7..692f933658538 100644 --- a/docs/admin/networking/port-forwarding.md +++ b/docs/admin/networking/port-forwarding.md @@ -121,7 +121,7 @@ not it is still accessible. ![Annotated port controls in the UI](../../images/networking/annotatedports.png) The sharing level is limited by the maximum level enforced in the template -settings in enterprise deployments, and not restricted in OSS deployments. +settings in premium deployments, and not restricted in OSS deployments. This can also be used to change the sharing level of `coder_app`s by entering their port number in the sharable ports UI. The `share` attribute on `coder_app` @@ -131,8 +131,8 @@ to the app. ### Configure maximum port sharing level (enterprise) (premium) -Enterprise-licensed template admins can control the maximum port sharing level -for workspaces under a given template in the template settings. By default, the +Premium-licensed template admins can control the maximum port sharing level for +workspaces under a given template in the template settings. By default, the maximum sharing level is set to `Owner`, meaning port sharing is disabled for end-users. OSS deployments allow all workspaces to share ports at both the `authenticated` and `public` levels. diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 602710289261f..87a14a98db23c 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -122,5 +122,5 @@ log entry: ## Enabling this feature -This feature is only available with an enterprise license. +This feature is only available with a premium license.
[Learn more](../licensing/index.md) diff --git a/docs/admin/templates/extending-templates/process-logging.md b/docs/admin/templates/extending-templates/process-logging.md index b5010f29a672b..989bdd8572ae5 100644 --- a/docs/admin/templates/extending-templates/process-logging.md +++ b/docs/admin/templates/extending-templates/process-logging.md @@ -17,7 +17,7 @@ Please note that these logs are not recorded or captured by the Coder organization in any way, shape, or form. > This is an [Premium or Enterprise](https://coder.com/pricing) feature. To -> learn more about Coder Enterprise, please +> learn more about Coder licensing, please > [contact sales](https://coder.com/contact). ## How this works diff --git a/docs/admin/templates/managing-templates/index.md b/docs/admin/templates/managing-templates/index.md index bee246b82f3d5..0abbac60487a6 100644 --- a/docs/admin/templates/managing-templates/index.md +++ b/docs/admin/templates/managing-templates/index.md @@ -60,7 +60,7 @@ infrastructure, software, or security patches. Learn more about ### Template update policies (enterprise) (premium) -Enterprise template admins may want workspaces to always remain on the latest +Licensed template admins may want workspaces to always remain on the latest version of their parent template. To do so, enable **Template Update Policies** in the template's general settings. All non-admin users of the template will be forced to update their workspaces before starting them once the setting is diff --git a/docs/admin/templates/managing-templates/schedule.md b/docs/admin/templates/managing-templates/schedule.md index b213ce9668313..4fa285dfa74f3 100644 --- a/docs/admin/templates/managing-templates/schedule.md +++ b/docs/admin/templates/managing-templates/schedule.md @@ -30,8 +30,8 @@ manage infrastructure costs. ## Failure cleanup (enterprise) (premium) Failure cleanup defines how long a workspace is permitted to remain in the -failed state prior to being automatically stopped. Failure cleanup is an -enterprise-only feature. +failed state prior to being automatically stopped. Failure cleanup is only +available for licensed customers. ## Dormancy threshold (enterprise) (premium) @@ -41,13 +41,13 @@ by the time elapsed since a user last accessed the workspace. A workspace in the dormant state is not eligible for autostart and must be manually activated by the user before being accessible. Coder stops workspaces during their transition to the dormant state if they are detected to be running. Dormancy Threshold is -an enterprise-only feature. +only available for licensed customers. ## Dormancy auto-deletion (enterprise) (premium) Dormancy Auto-Deletion allows a template admin to dictate how long a workspace is permitted to remain dormant before it is automatically deleted. Dormancy -Auto-Deletion is an enterprise-only feature. +Auto-Deletion is only available for licensed customers. ## Autostop requirement (enterprise) (premium) diff --git a/docs/admin/templates/template-permissions.md b/docs/admin/templates/template-permissions.md index 8bb16adbd4b08..e09acdfb3124c 100644 --- a/docs/admin/templates/template-permissions.md +++ b/docs/admin/templates/template-permissions.md @@ -18,4 +18,4 @@ user can use the template to create a workspace. To prevent this, disable the ![Create Template Permissions](../../images/templates/create-template-permissions.png) -Permissions is an enterprise-only feature. +Permissions is a premium-only feature. 
diff --git a/docs/tutorials/faqs.md b/docs/tutorials/faqs.md index 29eb86ea39b04..b982d8bc25566 100644 --- a/docs/tutorials/faqs.md +++ b/docs/tutorials/faqs.md @@ -1,7 +1,7 @@ # FAQs -Frequently asked questions on Coder OSS and Enterprise deployments. These FAQs -come from our community and enterprise customers, feel free to +Frequently asked questions on Coder OSS and licensed deployments. These FAQs +come from our community and customers, feel free to [contribute to this page](https://github.com/coder/coder/edit/main/docs/tutorials/faqs.md). For other community resources, see our diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index 6a38fe2b1cb96..0e75ce50ab29c 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -1,9 +1,8 @@ # Guides and Tutorials -Here you can find a list of employee-written guides on Coder for OSS and -Enterprise. These tutorials are hosted on our -[GitHub](https://github.com/coder/coder/) where you can leave feedback or -request new topics to be covered. +Here you can find a list of employee-written guides on Coder. These tutorials +are hosted on our [GitHub](https://github.com/coder/coder/) where you can leave +feedback or request new topics to be covered. This page is rendered on . Refer to the other documents in the `docs/tutorials/` directory for specific employee-written guides. diff --git a/docs/user-guides/workspace-access/port-forwarding.md b/docs/user-guides/workspace-access/port-forwarding.md index 9980b21455fca..969446be36836 100644 --- a/docs/user-guides/workspace-access/port-forwarding.md +++ b/docs/user-guides/workspace-access/port-forwarding.md @@ -123,7 +123,7 @@ it is still accessible. ![Annotated port controls in the UI](../../images/networking/annotatedports.png) > The sharing level is limited by the maximum level enforced in the template -> settings in enterprise deployments, and not restricted in OSS deployments. +> settings in licensed deployments, and not restricted in OSS deployments. This can also be used to change the sharing level of port-based `coder_app`s by entering their port number in the sharable ports UI. The `share` attribute on diff --git a/docs/user-guides/workspace-management.md b/docs/user-guides/workspace-management.md index ab55e79c2d2b4..4d4f30f2f9026 100644 --- a/docs/user-guides/workspace-management.md +++ b/docs/user-guides/workspace-management.md @@ -90,7 +90,7 @@ manually updated the workspace. ## Bulk operations (enterprise) (premium) -Enterprise admins may apply bulk operations (update, delete, start, stop) in the +Licensed admins may apply bulk operations (update, delete, start, stop) in the **Workspaces** tab. Select the workspaces you'd like to modify with the checkboxes on the left, then use the top-right **Actions** dropdown to apply the operation. diff --git a/docs/user-guides/workspace-scheduling.md b/docs/user-guides/workspace-scheduling.md index 240134c183888..322b7739def97 100644 --- a/docs/user-guides/workspace-scheduling.md +++ b/docs/user-guides/workspace-scheduling.md @@ -51,7 +51,7 @@ for your workspace. ## Autostop requirement (enterprise) (premium) -Enterprise template admins may enforce a required stop for workspaces to apply +Licensed template admins may enforce a required stop for workspaces to apply updates or undergo maintenance. These stops ignore any active connections or inactivity bumps. Rather than being specified with a CRON, admins set a frequency for updates, either in **days** or **weeks**. Workspaces will apply @@ -106,5 +106,5 @@ durations. 
Template admins configure an inactivity period after which your workspaces will gain a `dormant` badge. A separate period determines how long workspaces will remain in the dormant state before automatic deletion. -Enterprise admins may also configure failure cleanup, which will automatically +Licensed admins may also configure failure cleanup, which will automatically delete workspaces that remain in a `failed` state for too long. From fd60e1c2bab28f116c841cdaaad5827bdb6567b7 Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Tue, 29 Oct 2024 01:30:43 +0000 Subject: [PATCH 24/42] fix: fix security workflow not installing protoc properly (#15263) --- .github/workflows/security.yaml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index f71119afb22d7..4ae50b2aa4792 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -3,7 +3,6 @@ name: "security" permissions: actions: read contents: read - security-events: write on: workflow_dispatch: @@ -23,6 +22,8 @@ concurrency: jobs: codeql: + permissions: + security-events: write runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner @@ -61,6 +62,8 @@ jobs: "${{ secrets.SLACK_SECURITY_FAILURE_WEBHOOK_URL }}" trivy: + permissions: + security-events: write runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner @@ -95,13 +98,20 @@ jobs: # protoc must be in lockstep with our dogfood Dockerfile or the # version in the comments will differ. This is also defined in # ci.yaml. - set -x + set -euxo pipefail cd dogfood/contents + mkdir -p /usr/local/bin + mkdir -p /usr/local/include + DOCKER_BUILDKIT=1 docker build . --target proto -t protoc protoc_path=/usr/local/bin/protoc docker run --rm --entrypoint cat protoc /tmp/bin/protoc > $protoc_path chmod +x $protoc_path protoc --version + # Copy the generated files to the include directory. + docker run --rm -v /usr/local/include:/target protoc cp -r /tmp/include/google /target/ + ls -la /usr/local/include/google/protobuf/ + stat /usr/local/include/google/protobuf/timestamp.proto - name: Build Coder linux amd64 Docker image id: build From 1d925ab0438c9fb367b68f5c1d9c5aa6403dd859 Mon Sep 17 00:00:00 2001 From: Michael Smith Date: Tue, 29 Oct 2024 00:06:33 -0500 Subject: [PATCH 25/42] fix: ensure user admins can always see users table (#15226) Closes #15212 ## Changes made - Updated logic so that proxy config is only requested when appropriate, instead of for all users on all deployment pages - Split up the main context provider for the `/deployment` and `/organizations` routes, and updated layout logic for `ManagementSettingsLayout` layout component. 
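Why the protoc step above needs the extra copy: `protoc` cannot resolve imports of the well-known types (such as `google/protobuf/timestamp.proto`) unless its bundled include files are available on the include path, so the job exports both the compiler binary and the `google/` include tree from the dogfood image's `proto` stage. Below is a condensed sketch of the same steps run outside CI, reusing the image tag and paths from the workflow; treat it as an illustration, not a supported script.

```bash
# Build only the proto stage of the dogfood image; it bundles protoc and its includes.
cd dogfood/contents
DOCKER_BUILDKIT=1 docker build . --target proto -t protoc

# Extract the compiler binary onto the host and make it executable.
docker run --rm --entrypoint cat protoc /tmp/bin/protoc > /usr/local/bin/protoc
chmod +x /usr/local/bin/protoc

# Copy the bundled well-known .proto files; without them, imports like
# google/protobuf/timestamp.proto fail to resolve.
mkdir -p /usr/local/include
docker run --rm -v /usr/local/include:/target protoc cp -r /tmp/include/google /target/

# Sanity checks, mirroring the workflow.
protoc --version
stat /usr/local/include/google/protobuf/timestamp.proto
```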
This ensures the sidebar is always visible, even if request errors happen - Added additional routing safeguards to make sure that even if a user can view one page in the deployment section, they won't be able to navigate directly to any arbitrary deployment page - Updated logic for sidebar navigation to ensure that nav items only appear when the user truly has permission - Centralized a lot of the orgs logic into the `useAuthenticated` hook - Added additional check cases to the `permissions.tsx` file, to give more granularity, and added missing type-checking - Extended the API for the `RequirePermissions` component to let it redirect users anywhere - Updated some of our testing setup files to ensure that types were defined correctly --------- Co-authored-by: McKayla Washburn --- .vscode/settings.json | 1 + site/e2e/global.setup.ts | 1 + site/jest.setup.ts | 8 +-- site/src/contexts/auth/permissions.tsx | 28 ++++++-- .../management/DeploymentSettingsProvider.tsx | 64 +++++++++++++++++++ .../management/ManagementSettingsLayout.tsx | 27 ++------ .../management/SidebarView.stories.tsx | 3 +- site/src/modules/management/SidebarView.tsx | 39 ++++++----- .../ExternalAuthSettingsPage.tsx | 11 +--- .../GeneralSettingsPage.tsx | 25 +++----- .../NetworkSettingsPage.tsx | 11 +--- .../NotificationEvents.stories.tsx | 6 +- .../NotificationsPage/NotificationEvents.tsx | 8 +-- .../NotificationsPage/NotificationsPage.tsx | 23 +++---- .../ObservabilitySettingsPage.tsx | 20 ++---- .../SecuritySettingsPage.tsx | 17 ++--- .../UserAuthSettingsPage.tsx | 11 +--- .../CustomRolesPage/CreateEditRolePage.tsx | 1 + .../OrganizationProvisionersPage.tsx | 12 +--- .../OrganizationSettingsPage.stories.tsx | 7 +- .../OrganizationSettingsPage.tsx | 21 ++---- .../OrganizationSettingsPageView.stories.tsx | 1 - site/src/router.tsx | 52 +++++++++------ site/src/testHelpers/entities.ts | 25 ++++++++ site/src/testHelpers/renderHelpers.tsx | 2 +- site/src/testHelpers/storybook.tsx | 8 ++- 26 files changed, 243 insertions(+), 189 deletions(-) create mode 100644 site/src/modules/management/DeploymentSettingsProvider.tsx diff --git a/.vscode/settings.json b/.vscode/settings.json index 2476e330cd306..6695a12faa8dc 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -175,6 +175,7 @@ "unauthenticate", "unconvert", "untar", + "userauth", "userspace", "VMID", "walkthrough", diff --git a/site/e2e/global.setup.ts b/site/e2e/global.setup.ts index 6eafd2886de37..f39a2d475804e 100644 --- a/site/e2e/global.setup.ts +++ b/site/e2e/global.setup.ts @@ -35,6 +35,7 @@ test("setup deployment", async ({ page }) => { expect(constants.license.split(".").length).toBe(3); // otherwise it's invalid await page.goto("/deployment/licenses", { waitUntil: "domcontentloaded" }); + await expect(page).toHaveTitle("License Settings - Coder"); await page.getByText("Add a license").click(); await page.getByRole("textbox").fill(constants.license); diff --git a/site/jest.setup.ts b/site/jest.setup.ts index 7a06ebba2592f..7d4b6f0772bc4 100644 --- a/site/jest.setup.ts +++ b/site/jest.setup.ts @@ -1,7 +1,7 @@ import "@testing-library/jest-dom"; import "jest-location-mock"; import { cleanup } from "@testing-library/react"; -import crypto from "crypto"; +import crypto from "node:crypto"; import { useMemo } from "react"; import type { Region } from "api/typesGenerated"; import type { ProxyLatencyReport } from "contexts/useProxyLatency"; @@ -48,9 +48,7 @@ global.ResizeObserver = require("resize-observer-polyfill"); // Polyfill the getRandomValues that is used on 
utils/random.ts Object.defineProperty(global.self, "crypto", { value: { - getRandomValues: function (buffer: Buffer) { - return crypto.randomFillSync(buffer); - }, + getRandomValues: crypto.randomFillSync, }, }); @@ -72,5 +70,5 @@ afterEach(() => { // Clean up after the tests are finished. afterAll(() => server.close()); -// This is needed because we are compiling under `--isolatedModules` +// biome-ignore lint/complexity/noUselessEmptyExport: This is needed because we are compiling under `--isolatedModules` export {}; diff --git a/site/src/contexts/auth/permissions.tsx b/site/src/contexts/auth/permissions.tsx index 0c89d81686d2f..d2de7864874f0 100644 --- a/site/src/contexts/auth/permissions.tsx +++ b/site/src/contexts/auth/permissions.tsx @@ -1,3 +1,5 @@ +import type { AuthorizationCheck } from "api/typesGenerated"; + export const checks = { viewAllUsers: "viewAllUsers", updateUsers: "updateUsers", @@ -11,13 +13,20 @@ export const checks = { viewUpdateCheck: "viewUpdateCheck", viewExternalAuthConfig: "viewExternalAuthConfig", viewDeploymentStats: "viewDeploymentStats", + readWorkspaceProxies: "readWorkspaceProxies", editWorkspaceProxies: "editWorkspaceProxies", createOrganization: "createOrganization", editAnyOrganization: "editAnyOrganization", viewAnyGroup: "viewAnyGroup", createGroup: "createGroup", viewAllLicenses: "viewAllLicenses", -} as const; + viewNotificationTemplate: "viewNotificationTemplate", +} as const satisfies Record; + +// Type expression seems a little redundant (`keyof typeof checks` has the same +// result), just because each key-value pair is currently symmetrical; this may +// change down the line +type PermissionValue = (typeof checks)[keyof typeof checks]; export const permissionsToCheck = { [checks.viewAllUsers]: { @@ -94,6 +103,12 @@ export const permissionsToCheck = { }, action: "read", }, + [checks.readWorkspaceProxies]: { + object: { + resource_type: "workspace_proxy", + }, + action: "read", + }, [checks.editWorkspaceProxies]: { object: { resource_type: "workspace_proxy", @@ -116,7 +131,6 @@ export const permissionsToCheck = { [checks.viewAnyGroup]: { object: { resource_type: "group", - org_id: "any", }, action: "read", }, @@ -132,6 +146,12 @@ export const permissionsToCheck = { }, action: "read", }, -} as const; + [checks.viewNotificationTemplate]: { + object: { + resource_type: "notification_template", + }, + action: "read", + }, +} as const satisfies Record; -export type Permissions = Record; +export type Permissions = Record; diff --git a/site/src/modules/management/DeploymentSettingsProvider.tsx b/site/src/modules/management/DeploymentSettingsProvider.tsx new file mode 100644 index 0000000000000..c9f6cd5f4a8ce --- /dev/null +++ b/site/src/modules/management/DeploymentSettingsProvider.tsx @@ -0,0 +1,64 @@ +import type { DeploymentConfig } from "api/api"; +import { deploymentConfig } from "api/queries/deployment"; +import { ErrorAlert } from "components/Alert/ErrorAlert"; +import { Loader } from "components/Loader/Loader"; +import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { RequirePermission } from "contexts/auth/RequirePermission"; +import { type FC, createContext, useContext } from "react"; +import { useQuery } from "react-query"; +import { Outlet } from "react-router-dom"; + +export const DeploymentSettingsContext = createContext< + DeploymentSettingsValue | undefined +>(undefined); + +type DeploymentSettingsValue = Readonly<{ + deploymentConfig: DeploymentConfig; +}>; + +export const useDeploymentSettings = (): 
DeploymentSettingsValue => { + const context = useContext(DeploymentSettingsContext); + if (!context) { + throw new Error( + `${useDeploymentSettings.name} should be used inside of ${DeploymentSettingsProvider.name}`, + ); + } + + return context; +}; + +const DeploymentSettingsProvider: FC = () => { + const { permissions } = useAuthenticated(); + const deploymentConfigQuery = useQuery(deploymentConfig()); + + // The deployment settings page also contains users, audit logs, groups and + // organizations, so this page must be visible if you can see any of these. + const canViewDeploymentSettingsPage = + permissions.viewDeploymentValues || + permissions.viewAllUsers || + permissions.editAnyOrganization || + permissions.viewAnyAuditLog; + + // Not a huge problem to unload the content in the event of an error, + // because the sidebar rendering isn't tied to this. Even if the user hits + // a 403 error, they'll still have navigation options + if (deploymentConfigQuery.error) { + return ; + } + + if (!deploymentConfigQuery.data) { + return ; + } + + return ( + + + + + + ); +}; + +export default DeploymentSettingsProvider; diff --git a/site/src/modules/management/ManagementSettingsLayout.tsx b/site/src/modules/management/ManagementSettingsLayout.tsx index b9fcbc0936e4b..0cb313f0e53b9 100644 --- a/site/src/modules/management/ManagementSettingsLayout.tsx +++ b/site/src/modules/management/ManagementSettingsLayout.tsx @@ -1,7 +1,4 @@ -import type { DeploymentConfig } from "api/api"; -import { deploymentConfig } from "api/queries/deployment"; import type { AuthorizationResponse, Organization } from "api/typesGenerated"; -import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { Stack } from "components/Stack/Stack"; @@ -9,7 +6,6 @@ import { useAuthenticated } from "contexts/auth/RequireAuth"; import { RequirePermission } from "contexts/auth/RequirePermission"; import { useDashboard } from "modules/dashboard/useDashboard"; import { type FC, Suspense, createContext, useContext } from "react"; -import { useQuery } from "react-query"; import { Outlet, useParams } from "react-router-dom"; import { Sidebar } from "./Sidebar"; @@ -18,7 +14,6 @@ export const ManagementSettingsContext = createContext< >(undefined); type ManagementSettingsValue = Readonly<{ - deploymentValues: DeploymentConfig; organizations: readonly Organization[]; organization?: Organization; }>; @@ -48,15 +43,8 @@ export const canEditOrganization = ( ); }; -/** - * A multi-org capable settings page layout. - * - * If multi-org is not enabled or licensed, this is the wrong layout to use. - * See DeploySettingsLayoutInner instead. - */ -export const ManagementSettingsLayout: FC = () => { +const ManagementSettingsLayout: FC = () => { const { permissions } = useAuthenticated(); - const deploymentConfigQuery = useQuery(deploymentConfig()); const { organizations } = useDashboard(); const { organization: orgName } = useParams() as { organization?: string; @@ -70,14 +58,6 @@ export const ManagementSettingsLayout: FC = () => { permissions.editAnyOrganization || permissions.viewAnyAuditLog; - if (deploymentConfigQuery.error) { - return ; - } - - if (!deploymentConfigQuery.data) { - return ; - } - const organization = organizations && orgName ? organizations.find((org) => org.name === orgName) @@ -87,7 +67,6 @@ export const ManagementSettingsLayout: FC = () => { { -
+
}> @@ -106,3 +85,5 @@ export const ManagementSettingsLayout: FC = () => { ); }; + +export default ManagementSettingsLayout; diff --git a/site/src/modules/management/SidebarView.stories.tsx b/site/src/modules/management/SidebarView.stories.tsx index 2ddcf7750bc8d..6ffe4480261c9 100644 --- a/site/src/modules/management/SidebarView.stories.tsx +++ b/site/src/modules/management/SidebarView.stories.tsx @@ -1,5 +1,6 @@ import type { Meta, StoryObj } from "@storybook/react"; import { + MockNoPermissions, MockOrganization, MockOrganization2, MockPermissions, @@ -96,7 +97,7 @@ export const NoDeploymentValues: Story = { export const NoPermissions: Story = { args: { - permissions: {}, + permissions: MockNoPermissions, }, }; diff --git a/site/src/modules/management/SidebarView.tsx b/site/src/modules/management/SidebarView.tsx index b4099a4dd7815..e6c99769e529f 100644 --- a/site/src/modules/management/SidebarView.tsx +++ b/site/src/modules/management/SidebarView.tsx @@ -2,19 +2,15 @@ import { cx } from "@emotion/css"; import type { Interpolation, Theme } from "@emotion/react"; import AddIcon from "@mui/icons-material/Add"; import SettingsIcon from "@mui/icons-material/Settings"; -import type { - AuthorizationResponse, - Experiments, - Organization, -} from "api/typesGenerated"; +import type { AuthorizationResponse, Organization } from "api/typesGenerated"; import { FeatureStageBadge } from "components/FeatureStageBadge/FeatureStageBadge"; import { Loader } from "components/Loader/Loader"; import { Sidebar as BaseSidebar } from "components/Sidebar/Sidebar"; import { Stack } from "components/Stack/Stack"; import { UserAvatar } from "components/UserAvatar/UserAvatar"; +import type { Permissions } from "contexts/auth/permissions"; import { type ClassName, useClassName } from "hooks/useClassName"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { linkToUsers } from "modules/navigation"; import type { FC, ReactNode } from "react"; import { Link, NavLink } from "react-router-dom"; @@ -30,7 +26,7 @@ interface SidebarProps { /** Organizations and their permissions or undefined if still fetching. */ organizations: OrganizationWithPermissions[] | undefined; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -72,7 +68,7 @@ interface DeploymentSettingsNavigationProps { /** Whether a deployment setting page is being viewed. */ active: boolean; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -130,10 +126,11 @@ const DeploymentSettingsNavigation: FC = ({ {permissions.viewDeploymentValues && ( Network )} - {/* All users can view workspace regions. */} - - Workspace Proxies - + {permissions.readWorkspaceProxies && ( + + Workspace Proxies + + )} {permissions.viewDeploymentValues && ( Security )} @@ -145,12 +142,14 @@ const DeploymentSettingsNavigation: FC = ({ {permissions.viewAllUsers && ( Users )} - - - Notifications - - - + {permissions.viewNotificationTemplate && ( + + + Notifications + + + + )} )}
@@ -167,7 +166,7 @@ interface OrganizationsSettingsNavigationProps { /** Organizations and their permissions or undefined if still fetching. */ organizations: OrganizationWithPermissions[] | undefined; /** Site-wide permissions. */ - permissions: AuthorizationResponse; + permissions: Permissions; } /** @@ -241,8 +240,6 @@ interface OrganizationSettingsNavigationProps { const OrganizationSettingsNavigation: FC< OrganizationSettingsNavigationProps > = ({ active, organization }) => { - const { experiments } = useDashboard(); - return ( <> { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("External Authentication Settings")} - - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx index 5d3879e195996..2b094cbf89b26 100644 --- a/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/GeneralSettingsPage/GeneralSettingsPage.tsx @@ -1,9 +1,8 @@ import { deploymentDAUs } from "api/queries/deployment"; import { entitlements } from "api/queries/entitlements"; import { availableExperiments, experiments } from "api/queries/experiments"; -import { Loader } from "components/Loader/Loader"; import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; @@ -11,7 +10,7 @@ import { pageTitle } from "utils/page"; import { GeneralSettingsPageView } from "./GeneralSettingsPageView"; const GeneralSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const deploymentDAUsQuery = useQuery(deploymentDAUs()); const safeExperimentsQuery = useQuery(availableExperiments()); @@ -30,18 +29,14 @@ const GeneralSettingsPage: FC = () => { {pageTitle("General Settings")} - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx index 6ebd005f71031..ec77bb95e5241 100644 --- a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPage.tsx @@ -1,24 +1,19 @@ import { Loader } from "components/Loader/Loader"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { NetworkSettingsPageView } from "./NetworkSettingsPageView"; const NetworkSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("Network Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx index c2e8479a26f8c..61a1eddcd1a78 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx @@ -14,7 +14,7 @@ const meta: Meta = { defaultMethod: "smtp", availableMethods: ["smtp", "webhook"], templatesByGroup: selectTemplatesByGroup(MockNotificationTemplates), - deploymentValues: baseMeta.parameters.deploymentValues, + deploymentConfig: baseMeta.parameters.deploymentValues, }, ...baseMeta, }; @@ -25,7 +25,7 @@ type Story = StoryObj; export const SMTPNotConfigured: Story = { args: { - deploymentValues: { + deploymentConfig: { notifications: { webhook: { endpoint: "https://example.com", @@ -40,7 +40,7 @@ export const SMTPNotConfigured: Story = { export const WebhookNotConfigured: Story = { args: { - deploymentValues: { + deploymentConfig: { notifications: { webhook: { endpoint: "", diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx index 191e2eda6958e..38c36fc52c044 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx @@ -31,20 +31,20 @@ type NotificationEventsProps = { defaultMethod: NotificationMethod; availableMethods: NotificationMethod[]; templatesByGroup: ReturnType; - deploymentValues: DeploymentValues; + deploymentConfig: DeploymentValues; }; export const NotificationEvents: FC = ({ defaultMethod, availableMethods, templatesByGroup, - deploymentValues, + deploymentConfig, }) => { // Webhook const hasWebhookNotifications = Object.values(templatesByGroup) .flat() .some((t) => t.method === "webhook"); - const webhookValues = deploymentValues.notifications?.webhook ?? {}; + const webhookValues = deploymentConfig.notifications?.webhook ?? {}; const isWebhookConfigured = requiredFieldsArePresent(webhookValues, [ "endpoint", ]); @@ -53,7 +53,7 @@ export const NotificationEvents: FC = ({ const hasSMTPNotifications = Object.values(templatesByGroup) .flat() .some((t) => t.method === "smtp"); - const smtpValues = deploymentValues.notifications?.email ?? {}; + const smtpValues = deploymentConfig.notifications?.email ?? 
{}; const isSMTPConfigured = requiredFieldsArePresent(smtpValues, [ "smarthost", "from", diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx index d43c8c3a841a6..23f8e6b42651e 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.tsx @@ -6,21 +6,20 @@ import { } from "api/queries/notifications"; import { Loader } from "components/Loader/Loader"; import { TabLink, Tabs, TabsList } from "components/Tabs/Tabs"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useSearchParamsKey } from "hooks/useSearchParamsKey"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import { castNotificationMethod } from "modules/notifications/utils"; import { Section } from "pages/UserSettingsPage/Section"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQueries } from "react-query"; -import { useSearchParams } from "react-router-dom"; import { deploymentGroupHasParent } from "utils/deployOptions"; import { pageTitle } from "utils/page"; import OptionsTable from "../OptionsTable"; import { NotificationEvents } from "./NotificationEvents"; export const NotificationsPage: FC = () => { - const [searchParams] = useSearchParams(); - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const [templatesByGroup, dispatchMethods] = useQueries({ queries: [ { @@ -30,10 +29,12 @@ export const NotificationsPage: FC = () => { notificationDispatchMethods(), ], }); - const ready = - templatesByGroup.data && dispatchMethods.data && deploymentValues; - const tab = searchParams.get("tab") || "events"; + const tabState = useSearchParamsKey({ + key: "tab", + defaultValue: "events", + }); + const ready = !!(templatesByGroup.data && dispatchMethods.data); return ( <> @@ -45,7 +46,7 @@ export const NotificationsPage: FC = () => { layout="fluid" featureStage={"beta"} > - + Events @@ -58,10 +59,10 @@ export const NotificationsPage: FC = () => {
{ready ? ( - tab === "events" ? ( + tabState.value === "events" ? ( { /> ) : ( + options={deploymentConfig.options.filter((o) => deploymentGroupHasParent(o.group, "Notifications"), )} /> diff --git a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx index 1ea1a2d19ef82..12b574c177384 100644 --- a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPage.tsx @@ -1,14 +1,13 @@ -import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { ObservabilitySettingsPageView } from "./ObservabilitySettingsPageView"; const ObservabilitySettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const { entitlements } = useDashboard(); const { multiple_organizations: hasPremiumLicense } = useFeatureVisibility(); @@ -17,16 +16,11 @@ const ObservabilitySettingsPage: FC = () => { {pageTitle("Observability Settings")} - - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx index 2a296fc9d22d2..bda0988f01966 100644 --- a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPage.tsx @@ -1,13 +1,13 @@ import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { SecuritySettingsPageView } from "./SecuritySettingsPageView"; const SecuritySettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); const { entitlements } = useDashboard(); return ( @@ -15,15 +15,10 @@ const SecuritySettingsPage: FC = () => { {pageTitle("Security Settings")} - - {deploymentValues ? 
( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx index b6382f5a54f99..1511e29aca2d0 100644 --- a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPage.tsx @@ -1,24 +1,19 @@ import { Loader } from "components/Loader/Loader"; -import { useManagementSettings } from "modules/management/ManagementSettingsLayout"; +import { useDeploymentSettings } from "modules/management/DeploymentSettingsProvider"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { pageTitle } from "utils/page"; import { UserAuthSettingsPageView } from "./UserAuthSettingsPageView"; const UserAuthSettingsPage: FC = () => { - const { deploymentValues } = useManagementSettings(); + const { deploymentConfig } = useDeploymentSettings(); return ( <> {pageTitle("User Authentication Settings")} - - {deploymentValues ? ( - - ) : ( - - )} + ); }; diff --git a/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx b/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx index 80995a160b67d..e770a400af2a7 100644 --- a/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx +++ b/site/src/pages/ManagementSettingsPage/CustomRolesPage/CreateEditRolePage.tsx @@ -19,6 +19,7 @@ import CreateEditRolePageView from "./CreateEditRolePageView"; export const CreateEditRolePage: FC = () => { const queryClient = useQueryClient(); const navigate = useNavigate(); + const { organization: organizationName, roleName } = useParams() as { organization: string; roleName: string; diff --git a/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx b/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx index bd91c348e03ee..19387a28730eb 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationProvisionersPage.tsx @@ -14,15 +14,10 @@ const OrganizationProvisionersPage: FC = () => { const { organization: organizationName } = useParams() as { organization: string; }; - const { organizations } = useManagementSettings(); + const { organization } = useManagementSettings(); const { entitlements } = useDashboard(); - const { metadata } = useEmbeddedMetadata(); const buildInfoQuery = useQuery(buildInfo(metadata["build-info"])); - - const organization = organizations - ? 
getOrganizationByName(organizations, organizationName) - : undefined; const provisionersQuery = useQuery(provisionerDaemonGroups(organizationName)); if (!organization) { @@ -40,8 +35,3 @@ const OrganizationProvisionersPage: FC = () => { }; export default OrganizationProvisionersPage; - -const getOrganizationByName = ( - organizations: readonly Organization[], - name: string, -) => organizations.find((org) => org.name === name); diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx index 9c85f89a62b55..f6b6b49c88d37 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.stories.tsx @@ -1,6 +1,11 @@ import type { Meta, StoryObj } from "@storybook/react"; import { reactRouterParameters } from "storybook-addon-remix-react-router"; -import { MockDefaultOrganization, MockUser } from "testHelpers/entities"; +import { + MockDefaultOrganization, + MockOrganization, + MockOrganization2, + MockUser, +} from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider, diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx index 1d11c85a605ae..2b4eb18a9a524 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPage.tsx @@ -35,10 +35,7 @@ const OrganizationSettingsPage: FC = () => { deleteOrganization(queryClient), ); - const organization = - organizations && organizationName - ? getOrganizationByName(organizations, organizationName) - : undefined; + const organization = organizations?.find((o) => o.name === organizationName); const permissionsQuery = useQuery( organizationsPermissions(organizations?.map((o) => o.id)), ); @@ -55,13 +52,10 @@ const OrganizationSettingsPage: FC = () => { // Redirect /organizations => /organizations/default-org, or if they cannot edit // the default org, then the first org they can edit, if any. if (!organizationName) { + // .find will stop at the first match found; make sure default + // organizations are placed first const editableOrg = [...organizations] - .sort((a, b) => { - // Prefer default org (it may not be first). - // JavaScript will happily subtract booleans, but use numbers to keep - // the compiler happy. - return (b.is_default ? 1 : 0) - (a.is_default ? 1 : 0); - }) + .sort((a, b) => (b.is_default ? 1 : 0) - (a.is_default ? 
1 : 0)) .find((org) => canEditOrganization(permissions[org.id])); if (editableOrg) { return ; @@ -111,10 +105,3 @@ const OrganizationSettingsPage: FC = () => { }; export default OrganizationSettingsPage; - -const getOrganizationByName = ( - organizations: readonly Organization[], - name: string, -) => { - return organizations.find((org) => org.name === name); -}; diff --git a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx index 9983c25080a59..3e8b1ad3133b7 100644 --- a/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx +++ b/site/src/pages/ManagementSettingsPage/OrganizationSettingsPageView.stories.tsx @@ -4,7 +4,6 @@ import { MockDefaultOrganization, MockOrganization, } from "testHelpers/entities"; -import { withManagementSettingsProvider } from "testHelpers/storybook"; import { OrganizationSettingsPageView } from "./OrganizationSettingsPageView"; const meta: Meta = { diff --git a/site/src/router.tsx b/site/src/router.tsx index 2531c823b9f48..c9d8736979c34 100644 --- a/site/src/router.tsx +++ b/site/src/router.tsx @@ -10,7 +10,6 @@ import { import { Loader } from "./components/Loader/Loader"; import { RequireAuth } from "./contexts/auth/RequireAuth"; import { DashboardLayout } from "./modules/dashboard/DashboardLayout"; -import { ManagementSettingsLayout } from "./modules/management/ManagementSettingsLayout"; import AuditPage from "./pages/AuditPage/AuditPage"; import { HealthLayout } from "./pages/HealthPage/HealthLayout"; import LoginPage from "./pages/LoginPage/LoginPage"; @@ -28,6 +27,12 @@ import WorkspacesPage from "./pages/WorkspacesPage/WorkspacesPage"; // - Pages that are secondary, not in the main navigation or not usually accessed // - Pages that use heavy dependencies like charts or time libraries const NotFoundPage = lazy(() => import("./pages/404Page/404Page")); +const ManagementSettingsLayout = lazy( + () => import("./modules/management/ManagementSettingsLayout"), +); +const DeploymentSettingsProvider = lazy( + () => import("./modules/management/DeploymentSettingsProvider"), +); const CliAuthenticationPage = lazy( () => import("./pages/CliAuthPage/CliAuthPage"), ); @@ -427,22 +432,32 @@ export const router = createBrowserRouter( }> - } /> - } /> - } /> - } /> - } - /> - } /> - } /> - } /> - } - /> + }> + } /> + } /> + } + /> + } /> + } /> + } + /> + + } + /> + + + } /> + } /> + + } /> + } /> } /> @@ -452,14 +467,9 @@ export const router = createBrowserRouter( - } /> } /> } /> {groupsRouter()} - } - /> }> diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 0db6e80d435d6..1593790e9792d 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -2766,12 +2766,37 @@ export const MockPermissions: Permissions = { viewUpdateCheck: true, viewDeploymentStats: true, viewExternalAuthConfig: true, + readWorkspaceProxies: true, editWorkspaceProxies: true, createOrganization: true, editAnyOrganization: true, viewAnyGroup: true, createGroup: true, viewAllLicenses: true, + viewNotificationTemplate: true, +}; + +export const MockNoPermissions: Permissions = { + createTemplates: false, + createUser: false, + deleteTemplates: false, + updateTemplates: false, + viewAllUsers: false, + updateUsers: false, + viewAnyAuditLog: false, + viewDeploymentValues: false, + editDeploymentValues: false, + viewUpdateCheck: false, + viewDeploymentStats: false, + viewExternalAuthConfig: false, + 
readWorkspaceProxies: false, + editWorkspaceProxies: false, + createOrganization: false, + editAnyOrganization: false, + viewAnyGroup: false, + createGroup: false, + viewAllLicenses: false, + viewNotificationTemplate: false, }; export const MockDeploymentConfig: DeploymentConfig = { diff --git a/site/src/testHelpers/renderHelpers.tsx b/site/src/testHelpers/renderHelpers.tsx index f093adb1cfb4a..46ae893927801 100644 --- a/site/src/testHelpers/renderHelpers.tsx +++ b/site/src/testHelpers/renderHelpers.tsx @@ -9,7 +9,7 @@ import { ThemeProvider } from "contexts/ThemeProvider"; import { RequireAuth } from "contexts/auth/RequireAuth"; import { DashboardLayout } from "modules/dashboard/DashboardLayout"; import type { DashboardProvider } from "modules/dashboard/DashboardProvider"; -import { ManagementSettingsLayout } from "modules/management/ManagementSettingsLayout"; +import ManagementSettingsLayout from "modules/management/ManagementSettingsLayout"; import { TemplateSettingsLayout } from "pages/TemplateSettingsPage/TemplateSettingsLayout"; import { WorkspaceSettingsLayout } from "pages/WorkspaceSettingsPage/WorkspaceSettingsLayout"; import type { ReactNode } from "react"; diff --git a/site/src/testHelpers/storybook.tsx b/site/src/testHelpers/storybook.tsx index a76e1230205fc..e905a9b412c2c 100644 --- a/site/src/testHelpers/storybook.tsx +++ b/site/src/testHelpers/storybook.tsx @@ -7,6 +7,7 @@ import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; import { AuthProvider } from "contexts/auth/AuthProvider"; import { permissionsToCheck } from "contexts/auth/permissions"; import { DashboardContext } from "modules/dashboard/DashboardProvider"; +import { DeploymentSettingsContext } from "modules/management/DeploymentSettingsProvider"; import { ManagementSettingsContext } from "modules/management/ManagementSettingsLayout"; import type { FC } from "react"; import { useQueryClient } from "react-query"; @@ -131,12 +132,15 @@ export const withManagementSettingsProvider = (Story: FC) => { return ( - + + + ); }; From 4e20eea9e6ccdbf5432801c682e95ec5d5a1c067 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Tue, 29 Oct 2024 09:57:40 +0000 Subject: [PATCH 26/42] chore: remove unused 'must_reset_password' column (#15265) Closes https://github.com/coder/internal/issues/153 Remove the 'must_reset_password' as it was introduced for use in the "forgot password?" flow but never used. 
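
For reference, the schema change itself is a single reversible migration pair, reproduced here from the migrations added in this patch (000272_remove_must_reset_password):

    -- up: drop the unused column.
    ALTER TABLE users DROP COLUMN must_reset_password;

    -- down: restore the column with its previous default so the change can be rolled back.
    ALTER TABLE users ADD COLUMN must_reset_password bool NOT NULL DEFAULT false;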
--- coderd/database/dump.sql | 3 -- ...000272_remove_must_reset_password.down.sql | 1 + .../000272_remove_must_reset_password.up.sql | 1 + coderd/database/modelqueries.go | 1 - coderd/database/models.go | 2 - coderd/database/queries.sql.go | 37 ++++++------------- docs/admin/security/audit-logs.md | 2 +- enterprise/audit/table.go | 1 - 8 files changed, 15 insertions(+), 33 deletions(-) create mode 100644 coderd/database/migrations/000272_remove_must_reset_password.down.sql create mode 100644 coderd/database/migrations/000272_remove_must_reset_password.up.sql diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index fc7819e38f218..e4e119423ea78 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -668,7 +668,6 @@ CREATE TABLE users ( github_com_user_id bigint, hashed_one_time_passcode bytea, one_time_passcode_expires_at timestamp with time zone, - must_reset_password boolean DEFAULT false NOT NULL, CONSTRAINT one_time_passcode_set CHECK ((((hashed_one_time_passcode IS NULL) AND (one_time_passcode_expires_at IS NULL)) OR ((hashed_one_time_passcode IS NOT NULL) AND (one_time_passcode_expires_at IS NOT NULL)))) ); @@ -684,8 +683,6 @@ COMMENT ON COLUMN users.hashed_one_time_passcode IS 'A hash of the one-time-pass COMMENT ON COLUMN users.one_time_passcode_expires_at IS 'The time when the one-time-passcode expires.'; -COMMENT ON COLUMN users.must_reset_password IS 'Determines if the user should be forced to change their password.'; - CREATE VIEW group_members_expanded AS WITH all_members AS ( SELECT group_members.user_id, diff --git a/coderd/database/migrations/000272_remove_must_reset_password.down.sql b/coderd/database/migrations/000272_remove_must_reset_password.down.sql new file mode 100644 index 0000000000000..9f798fc1898ca --- /dev/null +++ b/coderd/database/migrations/000272_remove_must_reset_password.down.sql @@ -0,0 +1 @@ +ALTER TABLE users ADD COLUMN must_reset_password bool NOT NULL DEFAULT false; diff --git a/coderd/database/migrations/000272_remove_must_reset_password.up.sql b/coderd/database/migrations/000272_remove_must_reset_password.up.sql new file mode 100644 index 0000000000000..d93e464493cc4 --- /dev/null +++ b/coderd/database/migrations/000272_remove_must_reset_password.up.sql @@ -0,0 +1 @@ +ALTER TABLE users DROP COLUMN must_reset_password; diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go index 9888027e01559..9cab04d8e5c2e 100644 --- a/coderd/database/modelqueries.go +++ b/coderd/database/modelqueries.go @@ -374,7 +374,6 @@ func (q *sqlQuerier) GetAuthorizedUsers(ctx context.Context, arg GetUsersParams, &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, &i.Count, ); err != nil { return nil, err diff --git a/coderd/database/models.go b/coderd/database/models.go index e7d90acf5ea94..680450a7826d0 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -2879,8 +2879,6 @@ type User struct { HashedOneTimePasscode []byte `db:"hashed_one_time_passcode" json:"hashed_one_time_passcode"` // The time when the one-time-passcode expires. OneTimePasscodeExpiresAt sql.NullTime `db:"one_time_passcode_expires_at" json:"one_time_passcode_expires_at"` - // Determines if the user should be forced to change their password. 
- MustResetPassword bool `db:"must_reset_password" json:"must_reset_password"` } type UserLink struct { diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 45cbef3f5e1d8..d00c4ec3bcdef 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -10031,7 +10031,7 @@ func (q *sqlQuerier) GetAuthorizationUserRoles(ctx context.Context, userID uuid. const getUserByEmailOrUsername = `-- name: GetUserByEmailOrUsername :one SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE @@ -10068,14 +10068,13 @@ func (q *sqlQuerier) GetUserByEmailOrUsername(ctx context.Context, arg GetUserBy &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } const getUserByID = `-- name: GetUserByID :one SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE @@ -10106,7 +10105,6 @@ func (q *sqlQuerier) GetUserByID(ctx context.Context, id uuid.UUID) (User, error &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10129,7 +10127,7 @@ func (q *sqlQuerier) GetUserCount(ctx context.Context) (int64, error) { const getUsers = `-- name: GetUsers :many SELECT - id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password, COUNT(*) OVER() AS count + id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, COUNT(*) OVER() AS count FROM users WHERE @@ -10231,7 +10229,6 @@ type GetUsersRow struct { GithubComUserID sql.NullInt64 `db:"github_com_user_id" json:"github_com_user_id"` HashedOneTimePasscode []byte `db:"hashed_one_time_passcode" json:"hashed_one_time_passcode"` OneTimePasscodeExpiresAt sql.NullTime `db:"one_time_passcode_expires_at" json:"one_time_passcode_expires_at"` - MustResetPassword bool `db:"must_reset_password" json:"must_reset_password"` Count int64 `db:"count" json:"count"` } @@ -10273,7 +10270,6 @@ func (q *sqlQuerier) GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUse &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, &i.Count, ); err != nil { return nil, err @@ -10290,7 +10286,7 @@ func (q *sqlQuerier) 
GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUse } const getUsersByIDs = `-- name: GetUsersByIDs :many -SELECT id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password FROM users WHERE id = ANY($1 :: uuid [ ]) +SELECT id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at FROM users WHERE id = ANY($1 :: uuid [ ]) ` // This shouldn't check for deleted, because it's frequently used @@ -10324,7 +10320,6 @@ func (q *sqlQuerier) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]User &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ); err != nil { return nil, err } @@ -10353,7 +10348,7 @@ INSERT INTO login_type ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type InsertUserParams struct { @@ -10400,7 +10395,6 @@ func (q *sqlQuerier) InsertUser(ctx context.Context, arg InsertUserParams) (User &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10459,7 +10453,7 @@ SET updated_at = $3 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserAppearanceSettingsParams struct { @@ -10490,7 +10484,6 @@ func (q *sqlQuerier) UpdateUserAppearanceSettings(ctx context.Context, arg Updat &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10577,7 +10570,7 @@ SET last_seen_at = $2, updated_at = $3 WHERE - id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserLastSeenAtParams struct { @@ -10608,7 +10601,6 @@ func (q 
*sqlQuerier) UpdateUserLastSeenAt(ctx context.Context, arg UpdateUserLas &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10626,7 +10618,7 @@ SET '':: bytea END WHERE - id = $2 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $2 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserLoginTypeParams struct { @@ -10656,7 +10648,6 @@ func (q *sqlQuerier) UpdateUserLoginType(ctx context.Context, arg UpdateUserLogi &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10672,7 +10663,7 @@ SET name = $6 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserProfileParams struct { @@ -10713,7 +10704,6 @@ func (q *sqlQuerier) UpdateUserProfile(ctx context.Context, arg UpdateUserProfil &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10725,7 +10715,7 @@ SET quiet_hours_schedule = $2 WHERE id = $1 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserQuietHoursScheduleParams struct { @@ -10755,7 +10745,6 @@ func (q *sqlQuerier) UpdateUserQuietHoursSchedule(ctx context.Context, arg Updat &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10768,7 +10757,7 @@ SET rbac_roles = ARRAY(SELECT DISTINCT UNNEST($1 :: text[])) WHERE id = $2 -RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password +RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserRolesParams struct { @@ -10798,7 +10787,6 @@ func (q *sqlQuerier) UpdateUserRoles(ctx 
context.Context, arg UpdateUserRolesPar &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } @@ -10810,7 +10798,7 @@ SET status = $2, updated_at = $3 WHERE - id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at, must_reset_password + id = $1 RETURNING id, email, username, hashed_password, created_at, updated_at, status, rbac_roles, login_type, avatar_url, deleted, last_seen_at, quiet_hours_schedule, theme_preference, name, github_com_user_id, hashed_one_time_passcode, one_time_passcode_expires_at ` type UpdateUserStatusParams struct { @@ -10841,7 +10829,6 @@ func (q *sqlQuerier) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusP &i.GithubComUserID, &i.HashedOneTimePasscode, &i.OneTimePasscodeExpiresAt, - &i.MustResetPassword, ) return i, err } diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 87a14a98db23c..3ea4e145d13eb 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -25,7 +25,7 @@ We track the following resources: | Organization
| Field / Tracked: created_at false, description true, display_name true, icon true, id false, is_default true, name true, updated_at true |
 | Template (write, delete) | Field / Tracked: active_version_id true, activity_bump true, allow_user_autostart true, allow_user_autostop true, allow_user_cancel_workspace_jobs true, autostart_block_days_of_week true, autostop_requirement_days_of_week true, autostop_requirement_weeks true, created_at false, created_by true, created_by_avatar_url false, created_by_username false, default_ttl true, deleted false, deprecated true, description true, display_name true, failure_ttl true, group_acl true, icon true, id true, max_port_sharing_level true, name true, organization_display_name false, organization_icon false, organization_id false, organization_name false, provisioner true, require_active_version true, time_til_dormant true, time_til_dormant_autodelete true, updated_at false, user_acl true |
 | TemplateVersion (create, write) | Field / Tracked: archived true, created_at false, created_by true, created_by_avatar_url false, created_by_username false, external_auth_providers false, id true, job_id false, message false, name true, organization_id false, readme true, template_id true, updated_at false |
-| User (create, write, delete) | Field / Tracked: avatar_url false, created_at false, deleted true, email true, github_com_user_id false, hashed_one_time_passcode false, hashed_password true, id true, last_seen_at false, login_type true, must_reset_password true, name true, one_time_passcode_expires_at true, quiet_hours_schedule true, rbac_roles true, status true, theme_preference false, updated_at false, username true |
+| User (create, write, delete) | Field / Tracked: avatar_url false, created_at false, deleted true, email true, github_com_user_id false, hashed_one_time_passcode false, hashed_password true, id true, last_seen_at false, login_type true, name true, one_time_passcode_expires_at true, quiet_hours_schedule true, rbac_roles true, status true, theme_preference false, updated_at false, username true |
 | WorkspaceBuild (start, stop) | Field / Tracked: build_number false, created_at false, daily_cost false, deadline false, id false, initiator_by_avatar_url false, initiator_by_username false, initiator_id false, job_id false, max_deadline false, provisioner_state false, reason false, template_version_id true, transition false, updated_at false, workspace_id false |
 | WorkspaceProxy | Field / Tracked: created_at true, deleted false, derp_enabled true, derp_only true, display_name true, icon true, id true, name true, region_id true, token_hashed_secret true, updated_at false, url true, version true, wildcard_hostname true |
 | WorkspaceTable | Field / Tracked: automatic_updates true, autostart_schedule true, created_at false, deleted false, deleting_at true, dormant_at true, favorite true, id true, last_used_at false, name true, organization_id false, owner_id true, template_id true, ttl true, updated_at false
| diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 2de2d918dc0aa..f9e74959f2a28 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -147,7 +147,6 @@ var auditableResourcesTypes = map[any]map[string]Action{ "github_com_user_id": ActionIgnore, "hashed_one_time_passcode": ActionIgnore, "one_time_passcode_expires_at": ActionTrack, - "must_reset_password": ActionTrack, }, &database.WorkspaceTable{}: { "id": ActionTrack, From 4cad6f75a9dbdd90e6fbef10eadf010b9e6c83e1 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 29 Oct 2024 09:05:41 -0300 Subject: [PATCH 27/42] fix(site): fix workspace timings verbiage (#15268) --- site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx | 2 +- .../src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx index 8f37605ce5956..dc5550dcfed98 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx @@ -102,7 +102,7 @@ export const stages: Stage[] = [ <> Terraform apply - Execute terraform plan to create/modify/delete resources into + Execute Terraform plan to create/modify/delete resources into desired states. diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx index 4835cc2be8f69..9e16e55bae36e 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx @@ -55,7 +55,7 @@ export const WorkspaceTimings: FC = ({ ) : ( )} - Provisioning time + Build timeline ({ marginLeft: "auto", From 78ff375fed9a308e6b9f3104e3e8671724b62d5c Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Tue, 29 Oct 2024 12:19:56 +0000 Subject: [PATCH 28/42] feat: log when attempted password resets fail (#15267) Closes https://github.com/coder/coder/issues/15154 Log when someone attempts to either - Request a one-time passcode for an account that doesn't exist - Attempt to change a password with an invalid one-time passcode and/or email --------- Co-authored-by: Mathias Fredriksson --- coderd/userauth.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/coderd/userauth.go b/coderd/userauth.go index f1a19d77d23d0..13f9b088d731f 100644 --- a/coderd/userauth.go +++ b/coderd/userauth.go @@ -291,6 +291,8 @@ func (api *API) postRequestOneTimePasscode(rw http.ResponseWriter, r *http.Reque if err != nil { logger.Error(ctx, "unable to notify user about one-time passcode request", slog.Error(err)) } + } else { + logger.Warn(ctx, "password reset requested for account that does not exist", slog.F("email", req.Email)) } } @@ -381,6 +383,7 @@ func (api *API) postChangePasswordWithOneTimePasscode(rw http.ResponseWriter, r now := dbtime.Now() if !equal || now.After(user.OneTimePasscodeExpiresAt.Time) { + logger.Warn(ctx, "password reset attempted with invalid or expired one-time passcode", slog.F("email", req.Email)) httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Incorrect email or one-time passcode.", }) From 7982ad7659be9baa448a45f59388ef762b585c3f Mon Sep 17 00:00:00 2001 From: Joobi S B Date: Tue, 29 Oct 2024 18:32:20 +0530 Subject: [PATCH 29/42] feat: expose premium trial form via CLI (#15054) This PR closes https://github.com/coder/coder/issues/14856 --- 
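Because `coder login` now prompts for the trial details interactively, scripted and CI logins should keep passing the first-user flags and disable the trial prompt explicitly, as the pr-deploy workflow change below does. A minimal sketch of such an invocation, assuming a placeholder deployment URL, email, and password variable:

    # Non-interactive first-user setup; --first-user-trial=false skips the new
    # trial-info prompts (firstName, lastName, phoneNumber, jobTitle,
    # companyName, country, developers).
    coder login \
      --first-user-username coder \
      --first-user-email admin@example.com \
      --first-user-password "$password" \
      --first-user-trial=false \
      --use-token-as-session \
      https://coder.example.com
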
.github/workflows/pr-deploy.yaml | 2 +- cli/login.go | 129 ++++++++++++++++++++++++- cli/login_test.go | 97 ++++++++++++++++++- coderd/coderdtest/coderdtest.go | 10 ++ scripts/develop.sh | 2 +- site/src/pages/SetupPage/countries.tsx | 2 +- 6 files changed, 233 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index e86ad1f3dd351..6ca35c82eebeb 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -425,7 +425,7 @@ jobs: --first-user-username coder \ --first-user-email pr${{ env.PR_NUMBER }}@coder.com \ --first-user-password $password \ - --first-user-trial \ + --first-user-trial=false \ --use-token-as-session \ https://${{ env.PR_HOSTNAME }} diff --git a/cli/login.go b/cli/login.go index 484de69fdf1b5..3bb4f0796e4a5 100644 --- a/cli/login.go +++ b/cli/login.go @@ -267,12 +267,59 @@ func (r *RootCmd) login() *serpent.Command { trial = v == "yes" || v == "y" } + var trialInfo codersdk.CreateFirstUserTrialInfo + if trial { + if trialInfo.FirstName == "" { + trialInfo.FirstName, err = promptTrialInfo(inv, "firstName") + if err != nil { + return err + } + } + if trialInfo.LastName == "" { + trialInfo.LastName, err = promptTrialInfo(inv, "lastName") + if err != nil { + return err + } + } + if trialInfo.PhoneNumber == "" { + trialInfo.PhoneNumber, err = promptTrialInfo(inv, "phoneNumber") + if err != nil { + return err + } + } + if trialInfo.JobTitle == "" { + trialInfo.JobTitle, err = promptTrialInfo(inv, "jobTitle") + if err != nil { + return err + } + } + if trialInfo.CompanyName == "" { + trialInfo.CompanyName, err = promptTrialInfo(inv, "companyName") + if err != nil { + return err + } + } + if trialInfo.Country == "" { + trialInfo.Country, err = promptCountry(inv) + if err != nil { + return err + } + } + if trialInfo.Developers == "" { + trialInfo.Developers, err = promptDevelopers(inv) + if err != nil { + return err + } + } + } + _, err = client.CreateFirstUser(ctx, codersdk.CreateFirstUserRequest{ - Email: email, - Username: username, - Name: name, - Password: password, - Trial: trial, + Email: email, + Username: username, + Name: name, + Password: password, + Trial: trial, + TrialInfo: trialInfo, }) if err != nil { return xerrors.Errorf("create initial user: %w", err) @@ -449,3 +496,75 @@ func openURL(inv *serpent.Invocation, urlToOpen string) error { return browser.OpenURL(urlToOpen) } + +func promptTrialInfo(inv *serpent.Invocation, fieldName string) (string, error) { + value, err := cliui.Prompt(inv, cliui.PromptOptions{ + Text: fmt.Sprintf("Please enter %s:", pretty.Sprint(cliui.DefaultStyles.Field, fieldName)), + Validate: func(s string) error { + if strings.TrimSpace(s) == "" { + return xerrors.Errorf("%s is required", fieldName) + } + return nil + }, + }) + if err != nil { + if errors.Is(err, cliui.Canceled) { + return "", nil + } + return "", err + } + return value, nil +} + +func promptDevelopers(inv *serpent.Invocation) (string, error) { + options := []string{"1-100", "101-500", "501-1000", "1001-2500", "2500+"} + selection, err := cliui.Select(inv, cliui.SelectOptions{ + Options: options, + HideSearch: false, + Message: "Select the number of developers:", + }) + if err != nil { + return "", xerrors.Errorf("select developers: %w", err) + } + return selection, nil +} + +func promptCountry(inv *serpent.Invocation) (string, error) { + countries := []string{ + "Afghanistan", "Åland Islands", "Albania", "Algeria", "American Samoa", "Andorra", "Angola", "Anguilla", "Antarctica", "Antigua and 
Barbuda", + "Argentina", "Armenia", "Aruba", "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", + "Belarus", "Belgium", "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia, Plurinational State of", "Bonaire, Sint Eustatius and Saba", "Bosnia and Herzegovina", "Botswana", + "Bouvet Island", "Brazil", "British Indian Ocean Territory", "Brunei Darussalam", "Bulgaria", "Burkina Faso", "Burundi", "Cambodia", "Cameroon", "Canada", + "Cape Verde", "Cayman Islands", "Central African Republic", "Chad", "Chile", "China", "Christmas Island", "Cocos (Keeling) Islands", "Colombia", "Comoros", + "Congo", "Congo, the Democratic Republic of the", "Cook Islands", "Costa Rica", "Côte d'Ivoire", "Croatia", "Cuba", "Curaçao", "Cyprus", "Czech Republic", + "Denmark", "Djibouti", "Dominica", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea", "Estonia", + "Ethiopia", "Falkland Islands (Malvinas)", "Faroe Islands", "Fiji", "Finland", "France", "French Guiana", "French Polynesia", "French Southern Territories", "Gabon", + "Gambia", "Georgia", "Germany", "Ghana", "Gibraltar", "Greece", "Greenland", "Grenada", "Guadeloupe", "Guam", + "Guatemala", "Guernsey", "Guinea", "Guinea-Bissau", "Guyana", "Haiti", "Heard Island and McDonald Islands", "Holy See (Vatican City State)", "Honduras", "Hong Kong", + "Hungary", "Iceland", "India", "Indonesia", "Iran, Islamic Republic of", "Iraq", "Ireland", "Isle of Man", "Israel", "Italy", + "Jamaica", "Japan", "Jersey", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Korea, Democratic People's Republic of", "Korea, Republic of", "Kuwait", + "Kyrgyzstan", "Lao People's Democratic Republic", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libya", "Liechtenstein", "Lithuania", "Luxembourg", + "Macao", "Macedonia, the Former Yugoslav Republic of", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Martinique", + "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia, Federated States of", "Moldova, Republic of", "Monaco", "Mongolia", "Montenegro", "Montserrat", + "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru", "Nepal", "Netherlands", "New Caledonia", "New Zealand", "Nicaragua", + "Niger", "Nigeria", "Niue", "Norfolk Island", "Northern Mariana Islands", "Norway", "Oman", "Pakistan", "Palau", "Palestine, State of", + "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Pitcairn", "Poland", "Portugal", "Puerto Rico", "Qatar", + "Réunion", "Romania", "Russian Federation", "Rwanda", "Saint Barthélemy", "Saint Helena, Ascension and Tristan da Cunha", "Saint Kitts and Nevis", "Saint Lucia", "Saint Martin (French part)", "Saint Pierre and Miquelon", + "Saint Vincent and the Grenadines", "Samoa", "San Marino", "Sao Tome and Principe", "Saudi Arabia", "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", + "Sint Maarten (Dutch part)", "Slovakia", "Slovenia", "Solomon Islands", "Somalia", "South Africa", "South Georgia and the South Sandwich Islands", "South Sudan", "Spain", "Sri Lanka", + "Sudan", "Suriname", "Svalbard and Jan Mayen", "Swaziland", "Sweden", "Switzerland", "Syrian Arab Republic", "Taiwan, Province of China", "Tajikistan", "Tanzania, United Republic of", + "Thailand", "Timor-Leste", "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey", "Turkmenistan", "Turks and Caicos Islands", + "Tuvalu", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States", "United States Minor Outlying Islands", "Uruguay", 
"Uzbekistan", "Vanuatu", + "Venezuela, Bolivarian Republic of", "Vietnam", "Virgin Islands, British", "Virgin Islands, U.S.", "Wallis and Futuna", "Western Sahara", "Yemen", "Zambia", "Zimbabwe", + } + + selection, err := cliui.Select(inv, cliui.SelectOptions{ + Options: countries, + Message: "Select the country:", + HideSearch: false, + }) + if err != nil { + return "", xerrors.Errorf("select country: %w", err) + } + return selection, nil +} diff --git a/cli/login_test.go b/cli/login_test.go index 0428c332d02b0..9a86e7caad351 100644 --- a/cli/login_test.go +++ b/cli/login_test.go @@ -96,6 +96,58 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. + } + for i := 0; i < len(matches); i += 2 { + match := matches[i] + value := matches[i+1] + pty.ExpectMatch(match) + pty.WriteLine(value) + } + pty.ExpectMatch("Welcome to Coder") + <-doneChan + ctx := testutil.Context(t, testutil.WaitShort) + resp, err := client.LoginWithPassword(ctx, codersdk.LoginWithPasswordRequest{ + Email: coderdtest.FirstUserParams.Email, + Password: coderdtest.FirstUserParams.Password, + }) + require.NoError(t, err) + client.SetSessionToken(resp.SessionToken) + me, err := client.User(ctx, codersdk.Me) + require.NoError(t, err) + assert.Equal(t, coderdtest.FirstUserParams.Username, me.Username) + assert.Equal(t, coderdtest.FirstUserParams.Name, me.Name) + assert.Equal(t, coderdtest.FirstUserParams.Email, me.Email) + }) + + t.Run("InitialUserTTYWithNoTrial", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + // The --force-tty flag is required on Windows, because the `isatty` library does not + // accurately detect Windows ptys when they are not attached to a process: + // https://github.com/mattn/go-isatty/issues/59 + doneChan := make(chan struct{}) + root, _ := clitest.New(t, "login", "--force-tty", client.URL.String()) + pty := ptytest.New(t).Attach(root) + go func() { + defer close(doneChan) + err := root.Run() + assert.NoError(t, err) + }() + + matches := []string{ + "first user?", "yes", + "username", coderdtest.FirstUserParams.Username, + "name", coderdtest.FirstUserParams.Name, + "email", coderdtest.FirstUserParams.Email, + "password", coderdtest.FirstUserParams.Password, + "password", coderdtest.FirstUserParams.Password, // confirm + "trial", "no", } for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -142,6 +194,12 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. 
} for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -185,6 +243,12 @@ func TestLogin(t *testing.T) { "password", coderdtest.FirstUserParams.Password, "password", coderdtest.FirstUserParams.Password, // confirm "trial", "yes", + "firstName", coderdtest.TrialUserParams.FirstName, + "lastName", coderdtest.TrialUserParams.LastName, + "phoneNumber", coderdtest.TrialUserParams.PhoneNumber, + "jobTitle", coderdtest.TrialUserParams.JobTitle, + "companyName", coderdtest.TrialUserParams.CompanyName, + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. } for i := 0; i < len(matches); i += 2 { match := matches[i] @@ -220,6 +284,17 @@ func TestLogin(t *testing.T) { ) pty := ptytest.New(t).Attach(inv) w := clitest.StartWithWaiter(t, inv) + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. pty.ExpectMatch("Welcome to Coder") w.RequireSuccess() ctx := testutil.Context(t, testutil.WaitShort) @@ -248,6 +323,17 @@ func TestLogin(t *testing.T) { ) pty := ptytest.New(t).Attach(inv) w := clitest.StartWithWaiter(t, inv) + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) + // `developers` and `country` `cliui.Select` automatically selects the first option during tests. pty.ExpectMatch("Welcome to Coder") w.RequireSuccess() ctx := testutil.Context(t, testutil.WaitShort) @@ -299,12 +385,21 @@ func TestLogin(t *testing.T) { // Validate that we reprompt for matching passwords. 
pty.ExpectMatch("Passwords do not match") pty.ExpectMatch("Enter a " + pretty.Sprint(cliui.DefaultStyles.Field, "password")) - pty.WriteLine(coderdtest.FirstUserParams.Password) pty.ExpectMatch("Confirm") pty.WriteLine(coderdtest.FirstUserParams.Password) pty.ExpectMatch("trial") pty.WriteLine("yes") + pty.ExpectMatch("firstName") + pty.WriteLine(coderdtest.TrialUserParams.FirstName) + pty.ExpectMatch("lastName") + pty.WriteLine(coderdtest.TrialUserParams.LastName) + pty.ExpectMatch("phoneNumber") + pty.WriteLine(coderdtest.TrialUserParams.PhoneNumber) + pty.ExpectMatch("jobTitle") + pty.WriteLine(coderdtest.TrialUserParams.JobTitle) + pty.ExpectMatch("companyName") + pty.WriteLine(coderdtest.TrialUserParams.CompanyName) pty.ExpectMatch("Welcome to Coder") <-doneChan }) diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index d94a6fbe93c4e..47d9a42319d20 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -653,6 +653,16 @@ var FirstUserParams = codersdk.CreateFirstUserRequest{ Name: "Test User", } +var TrialUserParams = codersdk.CreateFirstUserTrialInfo{ + FirstName: "John", + LastName: "Doe", + PhoneNumber: "9999999999", + JobTitle: "Engineer", + CompanyName: "Acme Inc", + Country: "United States", + Developers: "10-50", +} + // CreateFirstUser creates a user with preset credentials and authenticates // with the passed in codersdk client. func CreateFirstUser(t testing.TB, client *codersdk.Client) codersdk.CreateFirstUserResponse { diff --git a/scripts/develop.sh b/scripts/develop.sh index bdaf81c7536e5..7dfad72d2e9f6 100755 --- a/scripts/develop.sh +++ b/scripts/develop.sh @@ -164,7 +164,7 @@ fatal() { if [ ! -f "${PROJECT_ROOT}/.coderv2/developsh-did-first-setup" ]; then # Try to create the initial admin user. - if "${CODER_DEV_SHIM}" login http://127.0.0.1:3000 --first-user-username=admin --first-user-email=admin@coder.com --first-user-password="${password}" --first-user-full-name="Admin User" --first-user-trial=true; then + if "${CODER_DEV_SHIM}" login http://127.0.0.1:3000 --first-user-username=admin --first-user-email=admin@coder.com --first-user-password="${password}" --first-user-full-name="Admin User" --first-user-trial=false; then # Only create this file if an admin user was successfully # created, otherwise we won't retry on a later attempt. 
touch "${PROJECT_ROOT}/.coderv2/developsh-did-first-setup" diff --git a/site/src/pages/SetupPage/countries.tsx b/site/src/pages/SetupPage/countries.tsx index 0fcebc25ac54d..9b13b6b6be0d9 100644 --- a/site/src/pages/SetupPage/countries.tsx +++ b/site/src/pages/SetupPage/countries.tsx @@ -964,7 +964,7 @@ export const countries = [ flag: "🇻🇪", }, { - name: "Viet Nam", + name: "Vietnam", flag: "🇻🇳", }, { From 3c7808c57513cc15fafc8fba04e59a65a3374825 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 29 Oct 2024 13:02:24 -0300 Subject: [PATCH 30/42] fix(site): update workspace timings to use theme colors (#15269) Fix https://github.com/coder/coder/issues/15266 After fix: Screenshot 2024-10-29 at 09 37 02 Screenshot 2024-10-29 at 09 36 49 Screenshot 2024-10-29 at 09 36 40 --- .../workspaces/WorkspaceTiming/Chart/Bar.tsx | 6 +- .../WorkspaceTiming/Chart/Blocks.tsx | 14 ++--- .../WorkspaceTiming/ResourcesChart.tsx | 58 ++++++++++--------- .../WorkspaceTiming/ScriptsChart.tsx | 53 +++++++++-------- .../WorkspaceTimings.stories.tsx | 4 ++ 5 files changed, 74 insertions(+), 61 deletions(-) diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx index a98d91ae428b5..3ed7fdcd31898 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx @@ -91,7 +91,7 @@ const styles = { left: -8, }, }), - clickable: { + clickable: (theme) => ({ cursor: "pointer", // We need to make the bar width at least 34px to allow the "..." icons to be displayed. // The calculation is border * 1 + side paddings * 2 + icon width (which is 18px) @@ -99,7 +99,7 @@ const styles = { "&:focus, &:hover, &:active": { outline: "none", - borderColor: "#38BDF8", + borderColor: theme.roles.active.outline, }, - }, + }), } satisfies Record>; diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx index 752e53c5b5c4a..00660c39f495c 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/Blocks.tsx @@ -52,16 +52,16 @@ const styles = { gap: spaceBetweenBlocks, alignItems: "center", }, - block: { + block: (theme) => ({ borderRadius: 4, height: 18, - backgroundColor: "#082F49", - border: "1px solid #38BDF8", + backgroundColor: theme.roles.active.background, + border: `1px solid ${theme.roles.active.outline}`, flexShrink: 0, flex: 1, - }, - more: { - color: "#38BDF8", + }), + more: (theme) => ({ + color: theme.roles.active.outline, lineHeight: 0, flexShrink: 0, flex: 1, @@ -69,5 +69,5 @@ const styles = { "& svg": { fontSize: moreIconSize, }, - }, + }), } satisfies Record>; diff --git a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx index b1c69b6d1baf7..3f1f7d761e748 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx @@ -32,33 +32,6 @@ import { } from "./Chart/utils"; import type { StageCategory } from "./StagesChart"; -const legendsByAction: Record = { - "state refresh": { - label: "state refresh", - }, - create: { - label: "create", - colors: { - fill: "#022C22", - stroke: "#BBF7D0", - }, - }, - delete: { - label: "delete", - colors: { - fill: "#422006", - stroke: "#FDBA74", - }, - }, - read: { - label: "read", - colors: { - fill: "#082F49", - stroke: "#38BDF8", - }, - 
}, -}; - type ResourceTiming = { name: string; source: string; @@ -86,6 +59,8 @@ export const ResourcesChart: FC = ({ const visibleTimings = timings.filter( (t) => !isCoderResource(t.name) && t.name.includes(filter), ); + const theme = useTheme(); + const legendsByAction = getLegendsByAction(theme); const visibleLegends = [...new Set(visibleTimings.map((t) => t.action))].map( (a) => legendsByAction[a], ); @@ -168,3 +143,32 @@ export const isCoderResource = (resource: string) => { resource.startsWith("coder_") ); }; + +function getLegendsByAction(theme: Theme): Record { + return { + "state refresh": { + label: "state refresh", + }, + create: { + label: "create", + colors: { + fill: theme.roles.success.background, + stroke: theme.roles.success.outline, + }, + }, + delete: { + label: "delete", + colors: { + fill: theme.roles.warning.background, + stroke: theme.roles.warning.outline, + }, + }, + read: { + label: "read", + colors: { + fill: theme.roles.active.background, + stroke: theme.roles.active.outline, + }, + }, + }; +} diff --git a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx index 5dfc57e51098f..64d97bff7cfdb 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx @@ -1,3 +1,4 @@ +import { type Theme, useTheme } from "@emotion/react"; import { type FC, useState } from "react"; import { Bar } from "./Chart/Bar"; import { @@ -28,30 +29,6 @@ import { } from "./Chart/utils"; import type { StageCategory } from "./StagesChart"; -const legendsByStatus: Record = { - ok: { - label: "success", - colors: { - fill: "#022C22", - stroke: "#BBF7D0", - }, - }, - exit_failure: { - label: "failure", - colors: { - fill: "#450A0A", - stroke: "#F87171", - }, - }, - timeout: { - label: "timed out", - colors: { - fill: "#422006", - stroke: "#FDBA74", - }, - }, -}; - type ScriptTiming = { name: string; status: string; @@ -77,6 +54,8 @@ export const ScriptsChart: FC = ({ const [ticks, scale] = makeTicks(totalTime); const [filter, setFilter] = useState(""); const visibleTimings = timings.filter((t) => t.name.includes(filter)); + const theme = useTheme(); + const legendsByStatus = getLegendsByStatus(theme); const visibleLegends = [...new Set(visibleTimings.map((t) => t.status))].map( (s) => legendsByStatus[s], ); @@ -151,3 +130,29 @@ export const ScriptsChart: FC = ({ ); }; + +function getLegendsByStatus(theme: Theme): Record { + return { + ok: { + label: "success", + colors: { + fill: theme.roles.success.background, + stroke: theme.roles.success.outline, + }, + }, + exit_failure: { + label: "failure", + colors: { + fill: theme.roles.error.background, + stroke: theme.roles.error.outline, + }, + }, + timeout: { + label: "timed out", + colors: { + fill: theme.roles.warning.background, + stroke: theme.roles.warning.outline, + }, + }, + }; +} diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx index b1bf487c52732..f546e271395ab 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx @@ -1,5 +1,6 @@ import type { Meta, StoryObj } from "@storybook/react"; import { expect, userEvent, waitFor, within } from "@storybook/test"; +import { chromatic } from "testHelpers/chromatic"; import { WorkspaceTimings } from "./WorkspaceTimings"; import { 
WorkspaceTimingsResponse } from "./storybookData"; @@ -11,6 +12,9 @@ const meta: Meta = { provisionerTimings: WorkspaceTimingsResponse.provisioner_timings, agentScriptTimings: WorkspaceTimingsResponse.agent_script_timings, }, + parameters: { + chromatic, + }, }; export default meta; From 097fdaffe2fe2309437d34882e0a5f9d77236dff Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Tue, 29 Oct 2024 16:31:46 +0000 Subject: [PATCH 31/42] chore: add quota to source-controlled template (#15271) --- dogfood/contents/main.tf | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dogfood/contents/main.tf b/dogfood/contents/main.tf index 5b026a46f934b..c2709e0faf6c1 100644 --- a/dogfood/contents/main.tf +++ b/dogfood/contents/main.tf @@ -275,6 +275,12 @@ resource "coder_agent" "dev" { EOT } +# Add a cost so we get some quota usage in dev.coder.com +resource "coder_metadata" "home_volume" { + resource_id = docker_volume.home_volume.id + daily_cost = 1 +} + resource "docker_volume" "home_volume" { name = "coder-${data.coder_workspace.me.id}-home" # Protect the volume from being deleted due to changes in attributes. From ceb168be95f3fdcf1fe8f9a4b13d195d67d442cf Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Tue, 29 Oct 2024 12:57:13 -0500 Subject: [PATCH 32/42] fix(flake.nix): remove `preBuild` to fix building on Linux (#15259) --- flake.nix | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/flake.nix b/flake.nix index e943ecc99df48..1473db147ce84 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ name = "protoc-gen-go"; owner = "protocolbuffers"; repo = "protobuf-go"; - rev = "v1.30.0"; + rev = "v1.30.0"; src = pkgs.fetchFromGitHub { owner = "protocolbuffers"; repo = "protobuf-go"; @@ -54,11 +54,6 @@ }; subPackages = [ "cmd/protoc-gen-go" ]; vendorHash = null; - proxyVendor = true; - preBuild = '' - export GOPROXY=https://proxy.golang.org,direct - go mod download - ''; }; # The minimal set of packages to build Coder. @@ -172,7 +167,7 @@ ''; }; packages = { - proto_gen_go = proto_gen_go_1_30; + proto_gen_go = proto_gen_go_1_30; all = pkgs.buildEnv { name = "all-packages"; paths = devShellPackages; From d83f4eb076c6d1ad67cf6cbe0cd7b11df2f8aecf Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Tue, 29 Oct 2024 14:08:24 -0400 Subject: [PATCH 33/42] fix: stop activity bump if no tracked sessions (#15237) Part of https://github.com/coder/coder/issues/15176 I originally kept this behavior the same because I wanted to be conservative about when we start dropping activity, but it is proving to be a problem when using `coder ssh` with `--usage-app=disabled`: the workspace agent still counts this as a connection (I think it should, so it is counted somewhere), but not as an SSH / IDE session. This leads to background ssh tasks that want to be untracked continuing to bump activity when they shouldn't. With this change, an explicit session is required to bump workspace activity.
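For clarity, a minimal sketch of the decision described above. This is not part of the patch: the `shouldBumpActivity` helper and `agentStats` struct are illustrative names only, standing in for the fields used by `Reporter.ReportAgentStats` in the diff below.

```go
// Illustrative sketch, assuming simplified stand-in types for the agent stats.
package main

import "fmt"

type agentStats struct {
	ConnectionCount             int64
	SessionCountVSCode          int64
	SessionCountJetBrains       int64
	SessionCountReconnectingPTY int64
	SessionCountSSH             int64
}

// shouldBumpActivity mirrors the decision in the diff: with usage tracking
// enabled, only explicit SSH/IDE sessions bump workspace activity; otherwise
// the legacy behavior (any connection) applies.
func shouldBumpActivity(usage bool, s agentStats) bool {
	if usage {
		return s.SessionCountVSCode > 0 || s.SessionCountJetBrains > 0 ||
			s.SessionCountReconnectingPTY > 0 || s.SessionCountSSH > 0
	}
	return s.ConnectionCount > 0
}

func main() {
	// A background `coder ssh --usage-app=disabled` task shows up as a
	// connection without a session, so it no longer counts as activity.
	fmt.Println(shouldBumpActivity(true, agentStats{ConnectionCount: 1}))  // false
	fmt.Println(shouldBumpActivity(false, agentStats{ConnectionCount: 1})) // true
}
```

The design choice is that raw connections are still recorded in agent stats, but only explicit sessions are treated as user activity.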
--- coderd/workspacestats/reporter.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index 6bb1b2dea4028..e59a9f15d5e95 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -117,6 +117,7 @@ func (r *Reporter) ReportAppStats(ctx context.Context, stats []workspaceapps.Sta return nil } +// nolint:revive // usage is a control flag while we have the experiment func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.Workspace, workspaceAgent database.WorkspaceAgent, templateName string, stats *agentproto.Stats, usage bool) error { // update agent stats r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) @@ -136,8 +137,13 @@ func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspac }, stats.Metrics) } - // if no active connections we do not bump activity - if stats.ConnectionCount == 0 { + // workspace activity: if no sessions we do not bump activity + if usage && stats.SessionCountVscode == 0 && stats.SessionCountJetbrains == 0 && stats.SessionCountReconnectingPty == 0 && stats.SessionCountSsh == 0 { + return nil + } + + // legacy stats: if no active connections we do not bump activity + if !usage && stats.ConnectionCount == 0 { return nil } From bbd2dd80e22ec236e68417809d21b67168958926 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Tue, 29 Oct 2024 12:30:13 -0600 Subject: [PATCH 34/42] fix: show template name on workspace page when template display name is unset (#15262) --- .../WorkspacePage/WorkspaceTopbar.stories.tsx | 36 +++++++++++++++++++ .../pages/WorkspacePage/WorkspaceTopbar.tsx | 12 ++++--- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx index ef7c72895552b..d95cfc3d60daf 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx @@ -320,3 +320,39 @@ export const TemplateDoesNotAllowAutostop: Story = { }, }, }; + +export const TemplateInfoPopover: Story = { + play: async ({ canvasElement, step }) => { + const canvas = within(canvasElement); + + await step("activate hover trigger", async () => { + await userEvent.hover(canvas.getByText(baseWorkspace.name)); + await waitFor(() => + expect( + canvas.getByRole("presentation", { hidden: true }), + ).toHaveTextContent(MockTemplate.display_name), + ); + }); + }, +}; + +export const TemplateInfoPopoverWithoutDisplayName: Story = { + args: { + workspace: { + ...baseWorkspace, + template_display_name: "", + }, + }, + play: async ({ canvasElement, step }) => { + const canvas = within(canvasElement); + + await step("activate hover trigger", async () => { + await userEvent.hover(canvas.getByText(baseWorkspace.name)); + await waitFor(() => + expect( + canvas.getByRole("presentation", { hidden: true }), + ).toHaveTextContent(MockTemplate.name), + ); + }); + }, +}; diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx index e3be26462cc5b..7ca112befb4e5 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx @@ -160,7 +160,9 @@ export const WorkspaceTopbar: FC = ({ templateIconUrl={workspace.template_icon} rootTemplateUrl={templateLink} 
templateVersionName={workspace.latest_build.template_version_name} - templateVersionDisplayName={workspace.template_display_name} + templateDisplayName={ + workspace.template_display_name || workspace.template_name + } latestBuildVersionName={ workspace.latest_build.template_version_name } @@ -366,7 +368,7 @@ type WorkspaceBreadcrumbProps = Readonly<{ rootTemplateUrl: string; templateVersionName: string; latestBuildVersionName: string; - templateVersionDisplayName?: string; + templateDisplayName: string; }>; const WorkspaceBreadcrumb: FC = ({ @@ -375,7 +377,7 @@ const WorkspaceBreadcrumb: FC = ({ rootTemplateUrl, templateVersionName, latestBuildVersionName, - templateVersionDisplayName = templateVersionName, + templateDisplayName, }) => { return ( @@ -399,7 +401,7 @@ const WorkspaceBreadcrumb: FC = ({ to={rootTemplateUrl} css={{ color: "inherit" }} > - {templateVersionDisplayName} + {templateDisplayName} } subtitle={ @@ -419,7 +421,7 @@ const WorkspaceBreadcrumb: FC = ({ fitImage /> } - imgFallbackText={templateVersionDisplayName} + imgFallbackText={templateDisplayName} /> From 25738388d55ed3bfc38495752a4ea044a0e3a75c Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Tue, 29 Oct 2024 20:58:26 -0500 Subject: [PATCH 35/42] chore(docs): add documentation on custom roles (#15280) These docs were overwritten in the restructure merge. --- docs/admin/users/groups-roles.md | 24 ++++++++++++++++++ .../users/roles/assigning-custom-role.PNG | Bin 0 -> 64037 bytes .../users/roles/creating-custom-role.PNG | Bin 0 -> 84273 bytes .../images/admin/users/roles/custom-roles.PNG | Bin 0 -> 107693 bytes 4 files changed, 24 insertions(+) create mode 100644 docs/images/admin/users/roles/assigning-custom-role.PNG create mode 100644 docs/images/admin/users/roles/creating-custom-role.PNG create mode 100644 docs/images/admin/users/roles/custom-roles.PNG diff --git a/docs/admin/users/groups-roles.md b/docs/admin/users/groups-roles.md index 77dd35bf9dd89..17c0fc8b5b8b9 100644 --- a/docs/admin/users/groups-roles.md +++ b/docs/admin/users/groups-roles.md @@ -31,6 +31,30 @@ Roles determine which actions users can take within the platform. A user may have one or more roles. All users have an implicit Member role that may use personal workspaces. +## Custom Roles (Premium) (Beta) + +Starting in v2.16.0, Premium Coder deployments can configure custom roles on the +[Organization](./organizations.md) level. You can create and assign custom roles +in the dashboard under **Organizations** -> **My Organization** -> **Roles**. + +> Note: This requires a Premium license. +> [Contact your account team](https://coder.com/contact) for more details. + +![Custom roles](../../images/admin/users/roles/custom-roles.PNG) + +Clicking "Create custom role" opens a UI to select the desired permissions for a +given persona. + +![Creating a custom role](../../images/admin/users/roles/creating-custom-role.PNG) + +From there, you can assign the custom role to any user in the organization under +the **Users** settings in the dashboard. + +![Assigning a custom role](../../images/admin/users/roles/assigning-custom-role.PNG) + +Note that these permissions only apply to the scope of an +[organization](./organizations.md), not across the deployment. 
+ ### Security notes A malicious Template Admin could write a template that executes commands on the diff --git a/docs/images/admin/users/roles/assigning-custom-role.PNG b/docs/images/admin/users/roles/assigning-custom-role.PNG new file mode 100644 index 0000000000000000000000000000000000000000..271f1bcae7781f353c1ab2db101cd508655787c0 GIT binary patch literal 64037
[base85-encoded binary data for the assigning-custom-role.PNG screenshot omitted]
zkOR$+>dbfRuQX->)lelQk*B=NQyr{eK8kwxe0QXL&5T&S%J2uUpYGZab$tXz2G6rB z>3*yu@qe{*ilg4@1_0H#&vcKQv-xj3Cy_c+RPipi+cQzfe%4qUT~vCwjSp?$O^4=rmuNHWYKkNkxA{-_;Xgj;rm`mV8$q0UVZy( zfodj^t52SUvhGX*hLDu^_agHTI!8BnHvS( zjN=2j>?7`%-|ebL-Tn`K$+Wf|C_ldk{=Z7Z%mfyttxp8_mf$@cjbpNb>8Evu6S#+uaFwUPi$-g6?FlK zE5hISluNqkd&BHhk7Y{nA3AW<_4)Oe9m!b#(B~?fNpqQLbje1x9c|U`UAEq`y{aoXchWg^+DD#{^OiCM4)p6mBbug>{_wF zVOaW0s&oE(^p*!HcCI@Z+WY+EE1A)ZMw5{bY4x45TzhQ->8k|>&2*z{>A83HHMh!? zE`c4af=Cj)YW3uP8*4L#>O4;uTWOmYBRjk23#@ z7xN|+Jma7y3hs%)QQ(+mby4KQRg>U380wI-rGhh;MWuH<@c@fGef``3ZWj|4+RE>e zo9O{5UFs~y<^-nM*E`fB8i>fzGuF3?c#V1g@{z=Ez&AxR4l#UAUe`hz7@zgHlbrRN z%bhr?v&{Y{NDmmDG>K}H+$gvQx0)V-`0UXhu6&O4GzMJ_Yt%c_rxC5v(+^U*^EvH{ zWVI~g(>Uhkjf3%C|NbD%tj27$mc{$*>b;VJEys+r6j@yZ>Gmt^!|pi3+1?GE^V2iy zE|8N0N18crCUxd?)w#IJ<@nNN=fr1zf0DQ`T^}rAC6*)l^DX5IT&q5c@S7=*^!WOm zAVlMHSPFhhP0s&NjB({EfoBAS|E8M~K6JorKYKL183tO@sTYV(Mr25a# zbm~50W`#F^EE3&^_`^AwaIkkirjQt&b6+8IdJc@L49qA|0XMtyC&Wl9+&TD{De|JW z=h)N0m>Jt3(`<^~nK#@J#XFDxNL50Yj-6lYd&ewz$S>Mh=F+!sB%@)YU~BGeGn7FA z2^zfULMatjg7QCciws-#4qtEOk|3aK8+Usg$on2TAYK?1b|~7o*ZHHJbDCS=N>6ij z7uT0Effx~?Nr4dF2;Mv*#K?NX>TSiBoFKNEr@C~xiB?Khj&#}m$^~IC!zP@3SndZS zWv+j@XWdr<*m1Z1*rV0~4)5FN!$<`fs<5vWKsvDCw_=BBz1>entCXd_#U0!*qZ8+2 zW1ze9oHMdJ&ts*25HIpyry8hv$}o%ASw=_%raXbZT}2b#{ZeyZP4s*PolBJexU=tT zl^ws)Abm%!Boz{u%`S+?d54tMxlKTI`D^C?d(JhmR9Zykmax~v$f+#19t%JhIV`jp zs6NFr^P)(F-KRu_D;nQflT8x94Hd+#h#L`Opn=a_oV!^?bRAKoYq+$#^4N=6jb=*z z6Onp{VZ70UOgFEGX{lH+Z*B}Q-}EgbkpaTkR&V)C((vl*;B#d)ajni4Ng4-PxIpBn z)%>!MfW`)FXPk zi@l(x2f{VscpUhlbJg3da2MlO^{1jWyd)nO1k{HbS_z#<0DR^@d1mi;qHZEzDq7^6 z_SLnrj@RLS>?Zu~`Y)Wfl43HItT?kv)0bxGkKuO{N8V)h*n2rVv)W~@{^`Q{caG;j zZq>iz>i>E4uMETgSmOW45(!W3e0z1WA0&fP?qq_ax$UWYp9LNJOAuDq1yIF5a-|=6 zrU(T79H(CcUi}|&{uz+Dtx1MbG`+6W||EEThT*lxQMiSq6fvuml+f@+CMid}ytBTPL=q?Xg>CuENP2?7{+l=sV9vaCl5Du{A#4WQjeLbt7R1a$$ zY&q2yp|M5VVvSc3psQLni_XP2O z9gBs9?{@rQF14WtcN@5H3XO);wh3H#a6HiJ9lxuzqOiu(o3!oT?pJ55e@MovB@Gcu z9Aqpjo*0HlNJB_vrTQA{05ewllZcwXv-{AyLkGda4vJ2lR`b&pTjM`McX|DPJ}aE; zGIMPtLN<#9Ez}mjzE5-kf`H7i+vF>sb@sHTY2Ba!%sOm@MDSNYnS!B5srnJ4idVRG zGP3pC2cY%>sPSKo@iW3%B`S?R?c+w4d2afLJdVwB%)K|D?)4pt30M|D>j5?eQAh9* zW=$#VAtyvB0Y!Gy$WJ|iwQZK#44@S^6T&1ZAt`2BX~8fkwbJdw z&>!&{55P8?Ex7iLlCZuk=nv`k@fg*RFO|r-{a5$tSH%HpvS&?k`i%%Dx{*K9#Z}dh zv62O;?2+cOsM%s#%DeZ>d`kATnU6N;b?Uh&7iC`Rh%!sa@|Wf}R8DEzDj2&-Wl23Y zq;P3isV@O+HGg)4r2X);ow=70p$rCeCQ_}2xYiaLYSywDIGdL5U_oV-+Rmfm(9CCF zQ*J?XC>b2!(6@#9Y+t#Q=7DUtz82Kj*01pKaKUQt4HmG*S;3|ceq?rw|%-8F4!wo-3uz$de}PEVOedbz2{w4ko2!!NZ0RKnVi^480WrI zeh{v9#ibUCCcrRC@NM6fwE#S;!jZIlDOLW+cz&9cf=`{5LYd4xJHEyFNYE32aD9}!BWYeymIz(xPXdj zXgGI#nTDZF>L^U0E-S&7w4;}9`mKi@22MZFcpVS zS9Dk>UoQld4&dyAeByHhNC?;GUGr8z>NuKuHttx=-)7zI!z+^R@Ndjm4BtkLse2I> z9(|RP-}EMqfg^G?2J$ZnQ;fTX!N44Q>|u;8_v{_cBkP%*R=Dl}WFY@^g#c#$4P}Wu z0_NJ!|3XT)yLO|1qXD&kR1oC+wVi;n#FucX@n_8(kM9F<3ILpkWRUt8e&8;mytXVf zZsh9}slZ##-^)|SC-RQWtDZ+)fJQ0o@tq_YKilm0Kv}Ag4i4# zl=Uy3ZTp7u!8Q)X6J2ib43l8 z8l+Sy!7;*#pEs;4QI?3s&d>0bf_{O$lE!C$Uf%G1l0sKpqqcTqf|H?Pd$#hS&@xK= zKk)y(g7|b@trnm`+xx_@tS{H=+IIKk+v{Zh2T~upyFT+oSsnguis|+>F7QWEp*H$0 z@%wPy7w27qOrS{<-da{P$~ijIDK!Ap2OIbtr^SHoj9)ujAuD=(R>K)+ZQEC-7{tpU zDg&Qm%`!%2tmu~MX~oscaH`h18H^z@xJ!I$3u%6QTwGC#_jQrDVIM*nXaMPx>w&d~ z&L#p9QHU>x;&)w)|2T1G%AtnQ!h&!u1Fsx+ZBB5^-o4RQURqj){6I0xd+_EA%7rvl zFCNa91e)9h)_*y8=9msLdN{0`vu7_^8^3t>AFz7-=c~lSnbe-|6XSF^48C2xho^G4 zhva|Wl<>1>PsXUvM{D$yN$GL-9TE!H7RDpvWW1Vzu8jY;Zaw{kUOYgsp0$LN&7S-V zIsQ{}dmVVM!O!Q?zcH5oq*?yw(Z3Q-|DUr2LIQ{kz4>;%ZRvkO9o1V8L^&y5R_ z^~j16j<*6oyRfZ%QT&Iu;G0_`Qj$GgwMncDmXFo?l0m5>0F$9`)`1XxH&R)^ 
zR6C$Xta=OZBT51I^VJ7wzCy8M0lXZ6=YGCcCJ%sct~$K0yG-AxUt@EjVerds$v6)j1c}}G5vz&WtvR`@k_*6nOrR)WaJfs<^XNGn zr(M{_Z0etNAhfJxLI7en?i0DF$m!J(67GMl95|%jB2_@VLe6 zry)Cl0v~;=;BdA{J|QtpXB7xfvJ=0|0);y5quX)2VNW6i5wcy0_2@~3t)<72<#(^7 z@CRp<=baEiABP07prg<52Y(9L(M$UzZ`B!U-@C!h6+;VP4oz3T)$Z%&?{@wWTc2_c z@$|eAIJBoVKEIkSm*b@svo{@x*V}{>GmEmleq=ELU=EG|u$j9e7ax6y7E>LmS)UR? zr#iHKbRwmzDBiivZ421w02}CfY7c}&K&92$4>Q#p*pQu28)c+%K~5sH{k z0G@tj9QfF=6P){y0n1Z?Cu6eUdZ}syB9Kf(f}HioW5@9e_1rd6$etTq?tj#hU|fVB zC{*l5Res`I)G$#xUxF9CTbaEwJ1y3irKifCc&}f5ZRVwjX~JAJ2Q&D;rUC$J4}>?I z>=EfLn}I&DmW0ISBQJ!78rBBytA>UvzPUv`DDB_lj0E_JU<<+9Y{fIWH!D|B5tx$Wk6vui))-@MTsW(vlGhfXQ&)p2NJA7hWFHR-YM&ny8hRY0U^L>o{* z)ksE5yL(ibv6%rQEoneZ{83t=H%W(E;KE8wPyZDZB9ngEvHik8z#iJx&s*Z#K`^6y z-HZL*`Ges?IeYS?=mD1w3B4OeGZ{VL=@dV$%=U$G$?9A;3)|a$U&Ws;oSgR=wzz(V zC(1a2Z~?yDp@d)NjFs;A>n{W8QtAEY<&=b_3k!R>BD6?ZF3!RZnLS|6_?}k?pjEs7 zc$9N(Oy8pchhL(6V*t<`4wOG@;OV;DHu*8Vp9H^*>{2EE<13jd8};&Y#J`Anou6Og zeExTvNc5Mp9w9EXJkG*&eysj^U{?Ndzx^B1M|%t44{-3u{Pa8jFUns62=e0ei%JWR} zn;G)@@YL~9nP&Cr`qVBVr%(1r&)vgTK}EhYZVxxDUW^ z(6+TFi`iVI=mTK*2+>xPEUZ|e1`7$uGn+4IpMM`*`%%vN{f=6eKLQ7%PsU5r)@jk9 zaU?a*k&>TAvBk=p=iR#3NSD|Dd87Fr1kY^(k7Iz0RntAqt%AL(FEPjII!k`g^LVv1Jz=GvCT(A5wvs_1)^0Axl!1C{%5_Wz`)})@5z! zk8ne(dJYHmYX}~nt;|VrMUy3rKlvO}GQJ>X^`xtrQkpU6)1~3B*O;tE}35Dg2k;K6_$EW~=elhVLs{Yv#$+Ls+&bYbvW+uW2 z=2zSa1={P=t!@42+1mhz8f#HL=MORX{bIn9JGy4F&9m(FQKvzjb1CnnCDcY+DDRU?o2Nk>IGnFyOTgm7tCo)$V z&)sTCM*$RaG0$nDWiipMrzKe(;n`X1boo+>SGg_gIy8@mTd;5iyx@$u9+qn~4M%zn z7sEZy&xhE|s|!X~7zR}}KR+0|#C*-jX5_nB8cru^1l5>DfPXD_?m6pY^^l7@YNZ-z z!97{7%_@YDV@7K1)-t+W8#dQvVAz0YVpB!YsWqDKJe1z{k@9=UuW+9JbDK^;VX);;P5d}RjV zQtM{6WOqKV>+ z5OKf=V^9GVP*5-skWm7TQUwA?C^|}$W(Y`Eij6LV3?&c<9SI213Be$usPur8P^1S4 zj2J=@xI5l&?&!?@_5Jvs@5enqPm*&^_F8+NcfIS~Yv~VaK(*u-+U-dOr1^!Fu4(2Z zhqZPqjkRK;k0f~e+f8;)l5MBRQBTo*Xno4*Rjx$RkfWQ#X)H?NWn}WB?GR-_9f30x z?7IWkh|$bxvxRJ5gk@gbVCf`ezLZlD- z`Py`lSm88&?>5ZRbErjglks*@SR=u#u+L`_%^QF}B+7^V!1)1z1D#WgkInUaa|Pq` zO%1r5SYW#dQFJxgn7cz#L&K}ua-ny6WBP|Fi>Xph`)z@Sxm?Od5|M8!4uz8d7iZqJ zYk29?Go0!?RCl&^GB*ZGC#)ikd!vvwKIu{L3{On8Jlwm+3Hx8xie&dxO+7j?67KNkxB!Ab(A=54O%X;_@u z;!$WRCZ-xD1L$BWEuir^QftGLQ}xA;hd9HlCNr)gQm5=$aPKtdp1K##am^RpO&)BR zt_<6IQ!!jll`HEq>nhWxgYadj7rf7ngbAX&Im4I^hqCP=**-3FcRKRzKoSF^BcMd;|LPM?8*?UFNrLJT{qgC%|db8L3SVJwD|rdfESOl&vXlC{EUJz$Po zNJT^cHa;lDhy4oTv6Pml*sQm|J#CY)OP}+g2nREehB|j5yZfh^&bL*ZUbx^Sy6y-^ z{nmE+!U=}%wE>NaLF3|`P?FkFYRJ)?l$%Ln=tg9Lek}tMxvzba!Ev!tHSF3(0Tba> ze#7>ATUT-8A!)2CE86o|bJxr6p++99rdD9X%*fD}ArRh=S|xSSyqtxgh)=VkFvqbJ ztWedEU5<3Cye=wCIm=*1-BJ0HRkG>5zR^I*lUH7?tBBMpj9g`HqYRhAI=20|&dar* zD4#}m?O`I09;wDzDl~`P7+DNU(_XM>TCd1i{gCRY4eI)S#bAQ=c5`Ax*ONQ-I&sSR z4KBDGJjtyhI|iL$DPumfFh!``!glyM7VDo|_eR0(Q7`q>)*i)<``ZfhG8|o3pDa#S z0=pic9malHKFtaGzI$MVVr=b5^%(t$hY|L>}()DLQM8Lrv22+X0D?Lt_(Ep zj2J*)bc*AY)+l&1vAxlicVoMe9Yp8QI1Vjvh#0xbG6{eYVU`WLyr1b;VJ#-sGun=# z@n9#B#nv?9G981j={9)imyNjd4g5^t)H`t5mrFy{NaC+4j>;o3+UXJ56_pioj_f4( z0lx2$m`0RCzDQ4;S=ZC<-zix9q+YyH0L*#da+~xm4ZXtlD_yPvcIz{Pf>}|;Xzp|E z(kwFJ9`vjbR8YJuod6^panuEA|A-bz{|m{@d2vWFbZCPB<{m>3#o7`EufAS(*6uEL ztD{Zv)p@p8didIG%XkZ=!oModK4UGv+;YM%nYU)NxI} zNY`+`yU*=&)>la#1VRs+dN5bIwPihZwwB=T#JN@d@IArV8}o702`Scj0S_*N7(*-)X~u;#N+_fh}1Z}uOOvw63rby z-(zM?JQC6NDn6Jk*Gh;NE6b9mJL`KCWl6W{^9)e&4{N@wri>~l6-f6OzOU*s!|23s zpNA88MdVG5hZ&1A%a5!H7T+?j+aey3W$nPezeTLPLprrV2gZ}IY0?0JWw zy3&db0}`5hg@?;*R#&;*@ogHJ|F`%R`M#M{|1VCRHU)L_uD_#}gYaPa{kolf8oq%K z`vK~EhK)>m`B+w%WH8BG8C^;+JzsQWs_pEq3SjgJn^8Fl71t6ZvCHP_oA`ta3bYRS z>Q|cqJ2rM?@*&ipY;<(}E629so5?C}i6oDgr=PAhyo{m^f;_%+4(L_1zS64m;*Xnf ztNeHxsqR%Ba??a8DCM6pAz3pZeyWxM0usnRMmPNtFu2YC{^i% 
zUA&riW#K^@RSF4Y-lZtJ_I4dfdyqta+ynGE!y63FouvZIs!AoTR*;gB8bYlpCL4fq zAWzcZHP+<6;tq+uCmq^eJw9qN3Uq9dDwZ;PpL7*?C#tY6t|chY+Yjld*c~Q$K16yC zJQ!>}f*ZWSj|38x#C%3Vjug3|u|$%e>#-tvs3h#nKot>CM0;*+K7>brnlx!a)MRz4 zwRglQJ4rJr>|HADJ^VyTzi*sgnucajO%QUVK9@WLOFgR`sdo*OKS&D-GV`OPivG3( ziGB&Hx=*r{*82gPO*F*Li66fb>^+cqUJ0cYr)JI5C$g6ZX+qbKAk!#XYvY>STvd`J zr|R5&plD~t+7uPvlQ+X8I;U|$IQI&0*P5c!9&PUEF^}HAt~7@)pT!o@&{$q0N$fx5 zdtp~5-dbD%-5)+`I0vKw0_Z&--95h(427ckIFqphDg@%oa;*^Tb+#dS1iKd*xE$ME zG-Rmoi=mHEn<9YeZgjZM04VYf1e(n+p$DUuj+qx95W(gZ!q6W-R$nV4f`Df!fxeG~ z0Bd@6u;&f%&b%kwb8GVE%rQV2X7xN?<*oxM9Kx&-YG_>Jg;yx{a49k^hlN* zr+I?UR&4cTHLqGDQmTXC*jrLqVbGJ87wU>Rj0s}xD#LN*EuS^7E+Ic43)Vrjy-3BO z;y+b6b{5>IzdeGedn^LB{eG?E=PM)y0fA|MIK};D_o>J}BX@EdLRzM+&EByyo1#ZT ztB9n6)@xE>iXhiTjT9&14@n#UHWIjpiF;fxW1cY79cEU>&P#M5vb$O%G(h9+#wscq z1fUD&DRJeG1Uh`&xER_zA}a1oAarazGYM8fro*5Mlw-D=szT}p%~b%;2$3Z3yM8) zpeLU~WVRnqdX)VUrVMDV#fd_tOC2WBm9{!|ch@XDqdvkSK2WuZPGkD4>YYFPRAIhDSM`?9viKQOcMTwRqBhV6fu_eeX@{qdACN6}>&)wYd= zZJCNtae4Lj51r!eB346KfxB?8V9oGsTUr0MDu$M};^+G!)PhV@#KUSZac(i#C`aUBsE6+$ zP|tn*n<7Tknx`Xhmk|T%Ky`E-Ds)l0Id;yCc7U0F2DNYRNynSBQYexa9FN>)Ts$0wi}@}km09l6#JW`2eakKe+mRcwB%9m*tVqJOvN5?Yv+*gy_XVIn zzw0C6T->>8BlWr^{YmQ+rM9-B$0TS}GFXSnIed^XV}eX78E@wMhf`elg$^I$=U-(B zwe@+@cVeefs#}-^Zd|s+iC->e-&j884yzpC@`(;>pows%4+c*3NdhJNLm%Mp-~$$l+mby-TfZm3OYlLepoRO{Wb>`5l8IXCje=dTgFR#7XC8IU#6cz-}c|3TTq7w(gh&D z7R}D%83!O|uJI?)Sh&}@r3JMkBf=MJE2DNnq~thi%E@aRk(*zPWO9_B9n_$Ye&6%s z?{m|YFNjVM$oS!9lEfxymT`dpVIx0nH@jy@_Vpgls~6s05J;mKk0ri|tQE5R9zxFG zBYD9OYKM$GND`1Uy^R5r*HK4-`T6nLce{tQ1Ry3F{Kqlo5AE>}YI2a<&1cR3hY%q{ z0w<-Zoxmre)-<9vssDhGuka}**wY-!?w$wWw<*Kk?w`GtfmQfUVtCOHE$@Nk-v~-~ z9;Js1Ky?2BJb?Tw3{e@VD@B99LcW2w|&g|@NW*D9%PwsM_`<(ln>$>jO_l)(99=>px zgM;I!{+(N992^J892{In4jp8FQgAx@9{b-uu$kUXj`9KiW%io`ZrXRXIXEigj%+{V zV!!|A$sHRo2gkAY-M@XEzJ<;l9IQP3TiO;OpcN+k$!RMjK5P!Xrf^N+@J&AZd_~uv zk90iq>W+Fp*7b;bY}@|`doZRH)EyoF=Zx*|+noFBFU_Ag`{2@h#V`52U%p75cy(w! z>f(2yV=DQ|KXj%2Mrr>blYFJ0{VMl;mG;|y^`M5G!dluSD6}cG4vkUS%8^u%DzOkd zVDXL=8*;mZkf{zKJ16JSpBOK)Y90?h+Q%L<`*m;P5PP1xucsWsU7UX}_dgZp{d;wY zBSQV}l`zNuf8+o2&2z{CVbxS?IvZ!7@Y-(JpBC#*SZs*aZOVJ-nMP8&BF#rUW{COT zFx`-)8kLakn6Q-AZf3WPKDxiSMfbwy%DctPYdum6n%?SWXUvGJov{}rsAWfX7r>{4 zbFYMbou(Ai=#sLpZ(S_;V`VNsY58;NATn3pedo!|wON{$uW{w_Kg+%s6QPpZfAYVE z5BMpqGlu=0mMXj`p1z-t_B!|qJyOnVua{*vm~$W0W?5X~n4qt9jn$A#r9{9Gm4?hzp(Pv}buan@8$I<^`f7=Q< z0IB<9b&9+rn*Bi^!Og*eY=N%>lP~NW6=JQ6Z{=vwQ>sGNh5n2kQPYqO4X0dRULTrY zwDQ$#TL-d|06(_4^sk(!^Dl!Jf{#7BDlsjL3QvhP#`AIHeJzN}8$-<$bT2H|%*W2G zP*99Z7D2r+{H{M;KE{bX?&VkW$>+ud0~zB`g2pX`TLau&RnT47WN#Ge0tQJ*txI+X z;h`;*hOMN2ZV}LvrGI<*E{rk?g(1bhh8<7&GoH+?xsW`8IecX#^z>oXu95^IZ5j&p z%hZ|k*RfwaHu|T=q2UmjU;Y538&fWW1a)pcM!D^}^Ka*6mye+@(;NX+!}E2=d%dXd z?xH^(dhyn!?oe|C57|H~HL-zqr0#5X5??$gQY__66|3)g-=CNJ6#QEUY-keZpR_@% z6GpakJeBSM==jEsXXFdZ=hNTWpzm!G^_TzoF7FfFmGMM9Jpi%K+7-@&r! 
zMdz#O=PzZ`qZ->^?ZR#q$CqmK*eXW4TFgN?zV6PN2aTNbj(qM*pIph6u0sdGB zXK~{ahtw}vL`i=3uk=Ql+WyOyb8yX!pqBOJ*CUl#DnF`Qz4yhnm)K_=K|ctL&3l)3 zcV2#eMz=8x%l!S80uH$wYCG*MR$J&q85!-w+gX*ACeWY*-7|KlPTrqihR?Ne4ms6o z{H*2=-^8n|**WksFg(r3p3=K8O=%78Ma|0$Ozx)BpImcLWsefU?@1=HD5HJo=SHrZ zuLg8_qfL-}h|CiR!)u8v&r*v#!wTgxOc1m~TFM+8!T}Uc{)Wr5+e?1K^LVRdUO}%D zHwc+qS%$*Whk;!i4zcG_x_K92@7&#;d#bbiS^50_Bl_8LXA=0dWD{buAv%@ruT|@Y zKd6w(|$sFpz2xwtXgZBwJs^QK4VeaC|?u1@Q+vc{trCcZbp_}A4-N#W)MRgPy_m& zix};IN%Ofwc_XzZbmpI+aYKndjpJS5Ac{aDC7iAr0=kF*9PMv z%8h4hG%LGQIZ^?7cf$ zpXPNs&H8)V{h7U#2NGvElDehTg7h~8zu}*qtjx|E!zg}YEb^aNWCsBg2l zx#hj-PM9GAeLWjyOt27w3JU(pT3F)4K_PcN1OlU7zN);icNt>6Fj*>~ zrxYu2&k6gQ(aY`)yQUjn6HYnxD zFF_{+9`^oK)ek2nE4DN3#k*+1+lcx~V^m0k_@F`*n9iX34KqDwO%+h^Yc$CQVG>1&-Lxkn-;m*R_%BmmXx8q?VXW@`Valg&&{pf2e}9{X)Eg))F;W2KIa)TWp!1!$CK1tSx<7vtlJGg{=m9=1*m@avfO) znv4bCg-u7M0gUbllVbgzf+xdiLhK#PWG8ql7|y~FTIag0;4Ho^3X2w89t7Iq<;5jC z?DRg5W^#LC*RtU6fb*gFi|8E^Itj=ka$?Oc%jOE}NBvrTTJbA*AZ&{dP|5hPbpQeu7;hKe^(lKq{gNX3sxi~7iNMTD zlw}shzrkxh+Twy}R`+J24{Vq%o~rrk;yUe{mT&Uu_dV^|n}T}BjQ_k)kZW9Dv#%!i zZ0p>>B$#ceM8SB2y3XSuN#l2P>`=F~vftUvJpeL4bF0u+Yu72nZFUB^K32V@5%O!c zLFx|ABSgx&;0sNWNtQI=jvKA8j=ND_X zf&;3yC3whd;?ZfIjyp*vcNUs_r#ZLGMo(omGZ(WL3`G$PJID@8p7SbmcGYoPw{5$& z{I^;o6`r=L!ODht*06|e z5S0Uv0T^t{uohS%6ugrVIE@tcEuRmyJ^(pX{pARG1isB3o;-fx!`Ih`v+4OpG?k3T zMc4&5w%@QO%D{UyQ6_^I)bPG2}PIb48dHL6ff3M9-)1I=!l2sKwgz|8@Xn zyfW#1Y`%CEWhr11%8@#2f(9=!a6kQ7VVRAcg+?IkL!pu}nW$~o$Rt`%E=~KOrnb%6 zcnW+j{~~ODo|X0+Nqle+0w64J>^uvWJ{!8{7PLLL=x4iyN|aMll*%4@_OuJ;;hHAqxDmZ0M=7LlvM|`&Jrp?v5w9wP9N=YnnOF2?4c^Rj+sK(9s1wT8!H8yf>_$ZMC>j zH}UBr7F}&LZ?2~x8%ttlh0|+`DH%My;ls&eI|jNtC1CfmQOulpE)bhPYSK(9hxI}2 z%b6HXc7jmK5vl%-9@ok^QX>)MW*<6P3O5~c%$y2cXrco3-KkVB0ZhvBbmZB$b1U+l z=qoB)T&6`oK3#tPo5^J+PH-2|e=UbF8Rxo{;fYcET-o>_6Je*i0y< zl-8F&Y?NC)HQ-L`81@+;HgDV36H{lbn`+I=;mZoKZ#JyAC&_5N498cAJI8s^M9-jU zxYF_travsX;j8!p6f^wILlowe*0z1ELK~F-8y0kPGmId+lG-nTV<&w_IXDibbPD3! 
z2_zEJ?Pbj6HIqpN!iskpdd?oscLAN{m946&(Byo!mhp!iA=jdtzKLbXRH5Ay}&o% z4LO);{&)e^R=o7F@x9hMjY^(KM?yUKxhRGNu~8B=HzF_J5j4 zcNkTaPH;EjFb|IqN9VPqS5khI0Mecu^hB{S2Xvd}D}bSZE-KcI=lZ;3E_dDMqvX?b zzcppeg+evk*_kBUA>){PySej+2=xup8fB{Oao#{)ZQ$6P8C`IOP8zm5C0~Tj!CkC+(f@^An>gN6H6ohu;L&Q;XwD6LfX0@;XNh@^twwv~X5KdwfsE!E&4?c@mxc*ov zKaVjUxB*EoaD4hsgAJLfz0=t7{twwJt& zkw4~Ly6=yrQ}XU@G;X6b02oL`T0O{msB8T`Qaw;GoD{cY4*%I9og!=kS;^+(;5dHv z$Qc|K9+}T#KAa(UgoeBq_!A>;rC@F72|h|L^&ek-3nE@iCT=dlKh^Zi)N2+YVq!;6 zv2CSHmuf5nU)30vZd@4|@#wQ=CkJggj*J%%EtfWH!y1=+;qF?uzDUNgY(aIst9iA6 z$QE=$Jlw*!A-Q&^OdU1Z*N{7^R`R==B^w-|o8l!p%FFnCu6v;g%_!TpKnGEs*wHj( z*IN6Jk!#rKlygt!S{Y0PcS%!JnJ%w&-U+0QG@A@C%@1G0Bz3QpZMYlWq>}rHj{RqD zEFUL_)8z+v_8E-bYsU+Q9|7}k6(VR_+1g1E5Wq>K2?iu=QfgmZsOFDU zUs#9xaB$o>FKvw|usUJQdMengAj0VoI{JxS_z-Aj3H<3=3Z!7=t!HihcVDc;S+kb1 zJP~f(kp#8zz*=4?+-*lMGKn!HbCHkdKefD!7?ZJu#M zlF1H33Yuco*tR>Jt}Y%eR_RbzNtJ%(E`{^V)MV>qtV$&+-?>I+Lq{cmkEuKgR6rlU z8HEs9GecWI*E*BsZVbi+qa_sFc^};qH0pjjr*y=Re@k8voDI$110TCc5^aD)p=47u zLzLi>yy-*%A0}T@j1`LRonhESjB)#=wQ9&}*a%KMEJo;9jaJgLoU#`hY~2$YODAbf z-`gz5>!C(~1Dajt;X2>{BvKgR9JiT#`Ir0#B$(`5RUp?j45PqVx^4cm9&5qq z1Qcw2&c)F`83{)e2=S~Bvx;?4BPUI!Cc%4k}cm~9u*+v$U)cb2nifB($>y&PiucxM0eEA)N*n~kRPkL=wx)U`VP zERHH~SW~EFH1BB4P5xo?)%7@R!k=B>rvUTiAf!$n;?gPWes!&V)yjt{vw@h?>O(KD zMjc;tY&(8fqN`d1z9HnCt*9`7QWqU(Qd)LEu-u;6p=$C_PgNQz$mTqAn;SAy9eq8g zv{Mnj>+?UjAiIF|@6#dI*yzXUM~J9SHtP6LcNbpe;4tCPo^2QDI?SD;~3KB2olw@*nGp|`TEc*@4S0C?nZw{&P;l5vb+}H+>V{L$18H+BaurQ=enr6h|`%YY)vPzsSK5)6W<}duuv1&Vs3X z3xy+<4Vu|qLn9YbA>GGeW!wj6;iFd1tMo>Ikg<5jr@neVQ zs%}>1={Sg{G&#RDvC%wRoV8_J&4`#}ak+cWN>MWm!-;){?zPw2Kq9^Tq~#4&|6g~5 zHKP89E~UD{8qE*ty~O?KEyAkY#*mV#2TfHrJ<1i@C20eS6kHm-OG48dNR@Sdyffeab#yTQ)KJQbR^0TPZ%2v5zZ$2e4(_G>V z2`mYq9o5f8Sse>a5xf8;Bvs>4cUWn%R*j9&+S197b;>>WvqyI*Cp#iLxi9(eMAWQW z1HTPymgXfF#4xE_iDAxI*2hsQ!J7BZ!kY{3gN(O4cY-}z6VIvCjWbHr($(9N2}w4Q zEfl$R;{3_^)F}*4*L&5D&EyL<3lnY&YasWMGK7pwE5ROw(D2+4lFDh@P%rT-^c`JI zn|~kIKAXL9esyO;L1TC_&1t*84Si+dXx3P2aL>SDeA`H+)oPLwbj72^M97^SU0-HY z$3_r_g<^WTesDRtuOd^*s-njmA7u^S#gq`ONjW*1_g&>;m#5DFULWgr*K2ltl23P{ z&S=h+ukt&YQoL$c)IVxTS!&6{EGc-DIn=U@xH7%Z#;qlrsd|T(Om5lSTepW^@6U`6 zz8yQ7I-95?E>%Sz?i=&Q>a*1Dx*Hz$zHMh> z5a4VvAc&Pxw5#tP7!Uz8jz~=z4iP;%yo$OInWGP5T1{%1m1UOiO>6p>%qf|YR;#1e zvJ+oPlFAj^J8_!@u@|746AF@|oSZ}OHY?i^Vn;G4aJm?lC19D{wMh#QC~0XlA0Rks znytd;s#G@7kC~>_&^s61C`0`gVS@wSmSKi{nimY!?l)d&Uwd_ITb>)TDM!_thK^8I zHGLifp438&YL$nE6L~NJPZh2|xqZ>YK1gY#_n_j=R&0{Z+%dRgq6T#F`Zb^OcX#`X z*A_%n8V1!(78s>F*DMSd7F&lj5Cl}S0Ok*I{a#>BCoy3>VUQMQfi5LhEc`hG000mg zF0PG`7>D?^WN$N@_57MFzNDqs%FyIj8HS(jjEhz=!2oYS?-z}62)V%lPkF2D;9#sf#) z@?f_;v?P2D>eq%pAu@etGPbuHQIV}3*3sc3={zel+92e(=3W>QF?&MmlJdPS5#LLA z3k?`aT^Okv)!-|zkJ|6H*|h(}d@E91$~i5}-FvX=oQru9Sl6MphPT+;V}|ObTOl_g zX7{f8Rr2|tk;lG7kKYzEYipcToc9=3vvS5Ys2KLl!V>P+6&C6n$1Mq8y$@%s%K^d* zCc_=ZB$=Y;oC_4B zOE>_Nr21@p%7dcUFCflJB`Tbsk1r!F29BdKJR{MY#)^66mG92E=-B9YTG@_`6$Tq$ZxU;cbpJgnQR|`A$f2tA#GC z+>p$?GdFuUvlMWzTh)^3qFV8q(j$nMQwq>WpQy{L^XqMaD@6N|FySaG0u`hBv|6Y2 zQgFk(3x$?r>A9SHm;0mTQvOs+sT*1)?sKLLWK$-aqw^ifx{;WnK-QIAw(+1=xvv@o zA1Zaj#Y7vx^-8?vS~LN_AyaZ4TB{R1(RHiu2T&xsMp#Q9079U?&Z$y}Dgdg*NvLcx zG)zzq&>Ie%oK8-n}je8E9+n7)g&WKy^m;Pvw%aeGhX5aE+RJx(!oOQx4R# zhIjFgN5901iGhi3Ey~NNMgi8g= zq)9EdXJE3mqY4AzhDgmO`UnK^9P3)1ckr)hoV73aOe*lr*2uK2cVv3a0Q;pNtZM}- zn_6xNt0^Lq%gy>?Ou^zI^1upJ+94>q)n~X#IfHh7(nBHY;a2Q@Tjoc+vw6GeJj}~f zV1IM+GXi5wvHNrEks-MI$RTPIu@CDsz+PLy1Nqf;fw7(T4^u6--yO@LYOGP(I&$G~ z4{T-1M(PT2J0SA%wu0+rTc`Dg2=%g%){&Z2TIpK?whC|OqLU1Olw;!Cj;S-oGdqbZ zCbR#!X6|VtVNY)`VktlFU}h^*UN9GGRl8c&wk(BUY;Dyr!oD%c9aHh5Ccy3Q;J36_2a%I)FbH$q-K(!KDlJc#1eR0DXkwlEHU@o*M;<~}jaeOAuc 
zPcUIIvoaZ%IkR~_vgM*;es|Mt#g6L}3a4hmBh>NIUz{~)(K=S}|J*bZA`X+I3&vu3 z^cl(c(yOjTrI1>~Xl`X|UgfK#cBM<)j&;Z%Hk8b3$n%PVw;ssmT#Ia}ZeCSt>0?YC z4H~hgrrg=GgLd$=vWdeE=HfwiN>Xc4y|mTpZ;L@xv!|WS24>bo&bpTwZ2H~TLEXw} zt%ACH@J-n>vK6|jg--f?><&#|GAr^L?Y)OTM2_|^i5r8QUsij-^+qbDLVR@bkp#?U z)m$t53n9w!eInZx2veQzj(p$l59OD0ZJh5TvS&#+AD3GfSeu}<6RK{pm6u@VBw)CC z)vIV7OqdwmG9M4Jh^rT!2Kx&pBnM@cIH~-J2N+x!57gA2{n3r9{oq1s4wHTZ&#gm` zNX1~%AA$y}@A|72Ye~TjUqB;TYKWhjk^>vuW+K0jW;d2IKOV+HT1+NEftn)ccO&)F zCs!)+YNcEZiCln@UYf3S3aSfxmSNbs2oru;eu~-E7KQ8vsb++yM(PcAaCRhI)sv08 zV91rz8D|idY!hCN=-dLyhZiC=+)CS)f`HVX&#@K|`X=}Q#9(ma?xHzJuTPWUQoNe> z)9dnaMTW-&tr(;8g`H{;pib|b3o4IkhCuKr-rvy! zQuNnuL}2_;2IZ7B^}01f11#Lga}6>(8t4?goOh(EGxTFwFuG{Ho@cXX0B`egpoj}N@%64t*&BFF%K$TOK~54>H1HD)Kw4&GgrhJ~LTY4UXggdy9hq7u;x~kFWx+ zMsg0-L0)4R4plSMy|R^E{HU|PqU8s~MR#|xoy!~tMOzUZ0{6k`xPW6;U4L|B;tXHs+*R^pZO z`~#D&Asa5TQ%Khv^*e>PWRL3C%a%NTxAxf-5N(fe3cPP-6Kr--lyk_;bVjXS7MB{*b99EPW~KiX+6g8VNOjqvt=}jS?v+~+tR_eWuDCNmDgFM^Vd_Oam&hF( zc20`vc{wJsnp8LMSpC?2eUe# zvZSV_J>A*X82BKBPJPMLIjJ#vsn~G*XVbrHrCA)-C#U5|NY0YyPqAg(ZL5Kl%-!fWVT$C+~8jjY-(J5nT!D!gesKoqMDRaq&d2$3>6qt#u$DY?hxPRhA%0ci7o%^DXm z@K0(Q_h~kaIo?KeuWrPA?R8v#BX3jd_1f1)^G&iwz3}Q@k55+ujNfnUUrURskSY{5 zDIkU(ZE*zbeRT+!b?&MZ??bEI{aZ4o?L*#w_r=omClt@2x9lB}BAPF>P-VOKljBCJ z!0(OE906>#&>39odWT&b>FUmc2Mb zo%(l#SN%Dk3uw;{{~*^~Iyro@7KA4M%>DXE!JpX~l+`Zp2b5LF7BKeS8^VoGf4fB< zL<$2P3Z`F)T?(omF5ua1>FEPu;#SSV$J z@|-fRQa>1YtoPI1W@95)-?(1Jfa+M=$GhiT$R(+Ch_3h~{{q0vt64mO767fDNwXWr z-E%44{_du560Ix}#Xlu&%Z9ftm_;W1q{dhg@n9`2VC}3hMn%y3lza|B{Nz9S>R02Y zupoEh?M69os+5vv&A2^MGMlu%e|MA>-`Ox#kcl1wuO`~BcBIVsmGo$k zKuR}igowJF*LkeGXHB<*@h9@KvkcPODCEo1Y%du%3Mfp_GoTSSsfpfQ~$mXiA5J- z8@{fUcxQEz_~XV^U2X_Z_jR>oWP8_OAaoL{(`W}j3!Zm58E$IT(g z#Rm2qUPY+;Udd0S^(^7>3y3qgRhHM7z@$x}c;KO+lBsa-6)6z~E23H8VnwC5wa+wv zGBcKDAZlKm1!&w!-%?Ex#!QQ|WqC5-5f$N6)&J1e3u2}1(9);0b$ zqSmev3nSF+mdA71z)g~kp_Gu)@YDy*O%nVhGrtic5 zGvt4myjB>r7Sk(Lu1W|gA8^tIWz53u#kDGk6Ftrp!@uyl^m9~(lCF1# zEu3>9JQHmxE=|yJYjU6!*;&B)Z9yJwbt&adge#1xro1uO7q8jR$?*5q9yJMv$$eJ` zs-M@o+;kOSdXCY44+oYq2x+D7ud=mP@%UM|lIOIm)|5+|_+LHy=|k+<%nYBv+59yB zwUcrCm8UG#g0j@cbK6r@tV9^12_5Z1 zu3Bmj#@JIXmUC;E$xhr6pK(oVA+I%Tjo9LcEIsGeftPa&yi@p96+{#|$Ws<(ZjV^GrCTq07V`Q3-y^(VZeQAHm(t4v#EXC@`a(H7pz+^~s< zB7bqs_cxB?L{^Zo!X`yAM*2okkbf4m?bl)*DC|Iypkq~F9vB@e*BC~vAbvi_XL5Zl z$L&>bU;jqY<>rCQ6UEI}%!`3V%z>VmlG(74B(_FK;bibg+6tdZ*pQEVNhk)3o=D`0 z4x{6S_2ze;j%mPh*e@_>vXLTJt9`tWgCw%<>>>GCYA01sqe0$6KRKFKuIi9Bs90~CGVRi8N zZEY@yUHw?kP~IFx=I{3KGjC&m7T=$weGg69FzL&qNoJt7vH!&%?&b~Fir_S#K19!E1~NL$H!J2hL#sZKn~+SEQOf@A0J zwU}AT*!^>ew+bNhE07(u(N)*Cmm^1o2G+M!n@Trm#(+RnvpW` z7t!(y>)BJ`r70SC*)C))9q-|ez9#M#oC0v^^)NA@kPB}Cvl8hSgKa<{k^u<6HSt|u zSk!|R2`#JAZj}X@iub-{5*N!t@=E;AfX}|X*zumQE!oql1)&YWG7&^M$Y*$YR`!OO(4)Ka*ZSR% zq=wzbbmzM=o2(}7<^m4Lu`>=QjryLMtSV^P48|C1Q#oYXW+urOYtX!gk8iD7wh7aB zPFhj101`nS^06OU^K>oG$Rq`-^)YLU=4FlRDy`o*hMFG>vR-vNA|t;Ed7bF%P8C3T zF&)`NK$w!&Wjos@3!4=FaOUmtHYg*nv&HSs$3YR5vdwz@f(2XL>tAYAYlqha2@H(X z6XX@o_K$ja@w@0Bx&G1vUNn#LI{*m{?{oUjf(Jb@I*2I>^+0>_ySS=h%k}T)cQ`t7 zua;2>p?xPuN(bP6{O5iSvL5C;7rDn`yDb*D-vaxwn&QM?+mtw1wYj z!Z^goe?HO?G-XvnE5$6FEt#9qOP|humN^j5f+7nFta?7U zVO+lpm0PNm(Zg5nE1bSESs3>-qGt4dw`xgg^BD5XB5oOsM?_mgXDir%UCelsRh$7U zZy(m&)hDHq`dggVIkNLdWMLv*K=e3aA#w07ooc-cdw%7O@2=czo9(UCUGbqVh z7Z~l_a3FxqfP4lcVf5;p4w_eiYSfw0stq<%uA^iyh2vh@P(*8yy6CaiZm_V^*1maO8UeF zDd00xrkX&zQSa)W?Q^e;xUyGC{;qJm44Hzl`8jUY{xrzTlkO{~4D1Ka{d$N3VwP_b z*|{~TYGb;Pf64=GyuMkT54NDwY!r}h44rNK^WTBH^JlHHuT8oT40pQHTDyfW1(r6! 
zi*6W56|cSf+{W1rxTEjhS!X8cS#~e~4z*oQ4Yh6|`Lt&YPU0z2WE?E@3yRGcEeEpz z14Va9;EEh^#r9%nROYA?s>2BztNFUEpk1n@v`}}Fo7*`~POV2GTdr?<+{M@7>dl0a zow{zhY+($enh{v%S!GMr8bw-`lLAvj3MD{`4fXGjRll2GYJnN8?Kq>>$-OOr_{> z@iSh5f@qz|joEMWmbOAwCj7MN)XfxKvGmN<#fg-~Wvk+&0>=uST5;-Qrhs(-5bb1W zYbvI;OKN_do`pzb>7;EeVp=b>5T}~YAE6#xnCM|b0$(EVhv6Gk78G8lG17ygZT165 z)_EY}ro2M}66OKGjQ&Tl&n|}o%ZKCIXM`M(Tej<)8Zq@I?)dK-u8mouXw5}`*(xDJ z54&$z3>#+z>y8d9C}MJfT*_OSanUaHz(v-W^G8jo&oX8QLL)5|@((j<;_t%xrv{5qMnatz=8B z8mx{FXV{B}4aD?dXV`pp8UU%n$ZTY-WgqQ^(J8&@l`iU3WwvfX#dGkzn^>Ac)6izx zpicCM9Mm!cj4W7jN(#+dO*aBcc`R-4dejBzUmFrr^t4K+(ZDe|&5r5KT3FN>-?`zC z4j_|1ze9}%PSg>CVs39b`K>pPy|GH~4dT|QX7NzLnNn_r$*V1{4E6-Wo|XT_3JcZ6 zw2_VKB(G}d21BgsAsiK;ymVFe{@f4m=3ISa$FY6S6#2Yf&`)Ny#3j!gwwRJ4R%(Qd zO)W%ZE{Ep%m6hY!&w35GblcM|Pm2VAyv-wbopAybGk?5X2z2nTs}*O$oY@XxH2|_? z0N0;i{6bGojpy3r zGaeyVR~9ttuV6z!D8dsx#UnW^N->VKI^fM=EUE_l`{Om_kRdL?Vs3&}=aCLbjydX3}WC@-Y?WfDRt(&bQsx(Yo# zmverjNQoXtQz@UL2%{c@3vS8A=~JQuNe$BDZ|X_zZ;JC|;1ADifN2mi;R?lkGc!*?t2dppETt;MadQizrq%>F!?Iz`^n2#s7y6O@|mnw1m0~wOou$zT=p^ zizXL;w8*;Of6f_amg42-ox118w5MwDRbgpXK`?0{g_;|VHIohL123u@1 z9`@JlpUltT1nf!SzOP3DVtanXOKz}}M|U*~pl5VdURC*8VT}+Q^%M=u^M6qrQ(oO{ zkZe_N>vFfMd_r>LAVjJ-x3bo-={{RVhjvjM^4~5TNt~FHPX5@>of`&^k*=<?Y&M;>7!FYl*9#Z#4&PWzk~cA(~{C*EtHnzu4?B!Om13{($7OBLtk=_R!mFmecI?{|HUD{U*k?^0laU8mPPIPurgFIJb2k@&MVQH%;C1gSGiiOzPgKmK5z=Ht|QB;VHzDL?xhsD(Gz*hO8VdsM`Vt4)N{@K@5 zU6-&c!oGtalXR~Xw5!b^J#|5*ucZwKO6}N`az}3SK)CmT=SMBkF>MZrtW6kyXOown zqvXi&8;@M*lN+0*b)diXyYO-LVQ0poa!N0|cCQNbEE{1L|H`u=z4f3DVa$mNmG-ia zG2FP7JUKRWkEj;Vb75@63X}Rk<6gBE6^lINysyZunXU$#(MJ?K>+?r*Z!>$EM?%#>y>dfEco2TuIbW+<#*||QhCi*l^+DMM~Agj?U8J7D`8`kzL*N5*|cQhC-G?#VcV)Ml+BuHCKdwu zR=+Khc|KUcZDS(((n-Ixm5q@-O9(&xmt3IDXPh*=gj;jTmd~4aH9Gm){WU#NBS^@| zz@DlYpBg_n9^NC=vEV-L*Y>|cw(NnW?8DC0JHV6gQnTXIxc^CGdZqYgK{gIRmQ@#Y zr9~<;vi@a}Pb=+0+m9n#V!X$l2_>mt(Xoc=N>b)lyoNU}~>-qWi|b)ZDP;iC2LtBNTFJ*#!Q zyw1@11VGwjaN?eYe!pU$+FrQ{z$LYLj>XNSAqGpvoh`T@5vYCE4*q-sZg0hJ9 zu6^UmYGqe3nKSv3s9RpPKG8+0@hb{51~KpZZOuHTD`UE2a8*_bgX0r-m!GuMKOU@@ z4is?Ax>Ya=6I$Xhp|S|<(*_-4L35|9RU=z>X_nDuMlE5Z24bWk&uHc5I5*5F=Rfm(J>Bi+UfP>k-BPrKf}dDTAljR51G--mii00~Vg@ z>R;OGvj8~2>UFnr7YorG$6~r8s9Ml}P$oguqIq<+IK>cQbs^1+`Kmi6xR*chSl8E5 z4=r*z_*MECOLBhuleOUvoCQtTI z=!;&BfW|fG`EQ={@;U8gyi}q(N!iUijOH&H9>%h?Kj~=Fgv`${M7RNay@N(=q*3R! 
zosil{6SPrt-&4{+#n$9wPhDN`VVS)wWmj-;s2MRUiZVXvL!?o7l*D{yon#><*F86F z#1~=YBjinZd3$?9pRq~rj9PM-t5DyA?}>(pb6J+m)VlpjmwlhG=jRsfKtJi&Uz=T1 zx98X)yF&9_Zq2PCI~OC`7YP`0I|NUe9Pd?>i%(EfSHOy!iJV+-;OX?EFe z+vcI+{AJ-;DC_OpZm|{>E=cf*60M_5tVtrz3NI&$y2}mmOWXcb?j)soiw_VV{8y?e=cdp8W`#3!`F9 zCbHNLx0U`k0sj4bckv?M2?ME#*gq+!hKB15(`i>6+U2+I+fVbptk{qGQxjchah~Mu z)p*4mS%G35sde~o?7e4LQ){;_tji@>!LD=^0hJ~oAYD=Eh|+6NA!Jbk(g{^XMU;+o z2ukmf-V&ms5C}y&p#+s)BcTKcA?FFW-nHLz?O*5YbA8u$&h}p_w;SiT8+lj&ut8jjucW#-UIdVve7)$|E4fQ0B-ZND15DNuQsat76-8WUPV9~j z@+t!r(my#q^Q&m1Az9*Ne_0pjqi$E)bG8Bg-qd&wO6%j+GgvSE%{fkw7OxsoMowO( zs`ok9?7Ia)bII20Csvh3inxH6DqC%t(m*{3FPL*s{?a&84j@<@Wo-d$mybbf13`GMw zr?#A7quL6WMIQ!;0;BXIK7Ujz1~`jcztXD2!`>>cHP_K$`0#95+evw&os~7I%DBdr zpk2$RUn}PU()^n~|KtV(cK%=aa2i+&8CTufiCrXi{>(ARe|(+5N6Z+(P4|Bap>RkW zXd*VuM*p*4^j}WKXJu!HPgLw&l~-fx&)w_F?m+r>a+Y);ikULsMHN1)P5s&0xbM(a zTz7tcj#X8Dv>7Ht+<~X|=!(;5R#SWASid8^pO^!-$j*)J*`xjc0rBW_ zUhZw9B9qzQpvt``!1zVT=Wb>BiY6T(E1aCs+u;JH#`F4OK|fDJOBGD9W`km3&4dwX zvE1Pt$RKl_O`UUM!{9#GHNZ-rGG4oO33AzC^7LzwC}@Y0ApZ4M3;Au+Ik5u8%boa) z6Bu{B*{M3`{_V-@EL`kL<`?b+K;4$Y7rFy>&%!66hlFrK`3%Mnps!;}?cj-prk4Dx zT>DhNmtwx;)JJTdwC=KmVIda^^7EGu{jt0Zh)FA&7;q1p87D$GV~++vb%SalY_dz) zfu=^YehiU=54KccLcDR4U6yJD*dpx~)a0@2+>=|rO>1`8xSi3zp$hOlk)cjOKG<4| zYd=*h6Vz3iA`B8y#{F7Di&sn`BUcuhe6y(P_Kgsr5s;O?4ipVN)ZkfY*eVejuMaQE z9~6T7)P-m0(e>=J^(PkZ66!rM*SPOPo8+OeuPru&^gBXXr9W~jumBz*@b^*Md37r= z^(saiZ7t;IVt6aXD@|rq%~xo()3St~;k-41&_a=O?X4zlsA=Oz-6bvOmQO5zxq;Mb zKVrTElFYG1TB@36ut%iB08G)p@K>xz^H^cDqP+q>&AGI({r(zx?~Iby9aSc=b8ts1 z%M0dfl$K9L;qMA_&DJ=LhlI-@94$7~_evqR)%d$I1`V35``RS9xxb2;((X@XmJXhL z4E?K##BN&7V05jXThTOks~F?VA5#jrab-a|6%{jet-)nrrDm2b`Y(6&y9H}8{g0M> z@+c!Yw+09DDTOpALiCT}sjgxP?A!>NrO$MXNYu+1w7N3qnJ_k7X^lB0IE3>X+1Wl% zHZB9YPRCohkq5y{EPzk`E5pc5_C1RP6Oe!884=1WHD@C%BByWUI9@Rmc953Qpl*oI z1mq`IzkMRE@RwTRmOl6#xazw6Q4)VH`-iVgX< zD|w8eQ*v4L>DpJkR$9I@2%~>q1i4$FA}>tMfbO;4y5jmLqLwu%$UgaWtz%Y+l^avK z6!jig4x1|UGG2m-=zkV&;2~xiSy_99cJsgMQKbyfM<>oL$dsD4@TkYL#6IR@B}y4SEui=AT@dMo5KZtWV|nDffbjw^0x z=~&#$e#5{XmlPtZz26qIMR04~tX9$+oAj~(ij5w*;+7({3HRG+%JMk8f~Q@-x(3F* zKztWM`B&0@S`GoTkb?+zlGdMM9Pe52KEPqKx z?lE6tGq=XxuYl*+7knO;maRm30_}siEQG}7)s04ZRgktRJ=I2zF+u+Jq0CI{FBl8&Uy)p|St3#jJ|m|6DceJ(n7-F`2^rglwL{-7zRU6cs@Wf!c~NMEK%UhIxp+%qV&X2F_DkrX zCIR7O5Drhxa10k$?SOiJZu=1^EYEI6{Za2E~ca zT;($-a9T&E-%1egoigAE%BS=7iRUmQ+OM`C9A~FMjoN0*(S=Ey_(4M~IKa6F&1$I; zlXACxnoORTEJ}53nmE{=ve&q>1e{ZDnAFqYSI|*ANx`wJM@$wPm}=@{_GoJzZv8e^%R`GHo$u z(j(>UXL|z$dXtN#ftaN-nRn`#zVpwmrK7Uz{O+Ls_9TYpNK`odo%K0I8i^KU_ z%+tl?2l0*wqhK?8a|c<}@#iV_n({~cOP1W%J zWEAFM%%j#w`7+}^Z&*l;_u7nGt16G1tSD?C89slizlPYw->DwoQ&?fEU)$K{I~?9$ z>_aSB{*@v|sdmuHTzWgJUql=HBwuMarPQ0V;wfn!IXBW>Wx$~{wJXq5CLuSAX+q%`gF(D)D(86T`GmjAs9l3DRw1;k8AZ=$!)@7(WCXW{n`Qs{8;&U0uy;JR?kRE$=E+PgYfoo3;Bxir|zvFuO8 zWS30^5B0}c`>nwa&Q%4QFDt+L>!QVR_<)hqidKAK=8)EN<-1W%;EU6s_|YNLT!Kp| zxhBDGNLTj0$zmiQm%)Bh6%ND36wF{{bv9+Q5SIO z8HAh@e`%pwvL+XF8@cdCg^SEzzm$eaQy* z5+brn{XCqSvO1uX+cq4=RS3(ItObl%ul?7Px54R6r1L|TxpTDl#k=JA?*`R3Ap&-i z(JIp=5l>+PeHKQ}F{wouiQM?AO%bWx;P?jd+1c?}1Ke7_kcF<}(bOg-CWfSN&?JA} zYVp3ANZ8TU(&lSmTrSn$tnP{ZH%v}DM&>Ag7~NhoGp(FYCS$k60xt!w2lKb^?8ZttW+734$n_4r zb%&4V7yjrDU)yF)WQ{_Yt%-m2J$=Wbl@?qPsBk(WRCGg)xY4jV38hdwwJ#4tey0(v!7r|a3}V4EMV=>Xil?@N4uC%T)lK8XliMIqxOl-RFbZ$P)S=rlhe zb!^W!kvHc&8hzX0dllG0sYd>p0xDJv=hX*yUk@(~Xv`t&sL;S!!b)(-_R!Lniyn zuIpn+Y3f1F2yXT^>&^9!dXg0*uHxmBuXbbdko)e|<>6jqsf(S0Gk$s(|NUh?aRUr(F>UqTqA+Jo`g=|&_&)Q@;Gcckj+U~rt?Xh~cZ?X*v zu#YfiuiE*CpV6?sKKv(Er6{d<)$y*TG+fEZsO~lwoji1W#vyAwty4$x0IRY;Yv2Nd1 zGQhs2%9+`WNeYd>SWugIn@@3}>yCe)6T3LH@dHZ>N%4W@_%->^S_k2!s4?`Q=WQhg 
zFFh6~`rT4xuw6Qu@$$r2>!}_w|K2sW-Zi{XQK^S~mgRyk9a}jkM^zT)LHZeQm(SMN zn3irAjK)|tZo8*ul+uRs7QjAxa`nS41%FlruGWbYlY}2>0UdLjqcI+N&^d4Mpsw7;9nDztjW_; zlp4QWQY6Yz%YXDR8z>C{ezI{TFKKwF)C?0a$xLc#+C012blgsvT%58womA7t+Im%& z{;U`865K2HgVi_S0&Kewg295&9KPbqL;tEf-XrT4H~LF04VU%ixU2^m?YEw{&G+LZ zfo|!B4cvT6Y^o#sj|4j}yw$JW`iZb}K$s8usD}D;(oZ_11==nyH&DV`ih~5(DmPi1 zRWSS|{DXwbdS9th_bz%FplaYQ@w^HtAV&f5tJSazh#SnPmx0 z+IWFjbC~k2t)~%FRz`myu{CKMV?Kn{eA(reBjj0V-KMV>kr|OyO6v%?01wnl%BQwU6HtQt3;R%3kv!5^2S=~`F?gP-gYL`RC>m-0 zZG3@QD=n{-M^5m#!ZKH$U1tpc%C=|SzQWkicxA;$VQP43#3w|cn_*=^1AD~mX6Q{M z@>L1l3LEKSAxRpYr%@fTy;n$h5C4q-QKZ0hCF9a>!ILHJ#uyzJlsrP6TT4!j^v$d{IR+|hD2 z_dZGs+D{N?+nCN}i)O1AWLDNACl?!bnWiB*9blSSKm=O8lEc;f)urkE{y=J%9 z{PFqL`yD;M17I$o$GV(=Ofk#}I4HeX9Idycow$9hmV?j< z8|A6*rf=h~oa3@Yv`)bK?2zm1&x8~L{H1N9a$iB|8kZEg^uDJKjfT3F99D|8(%o}) zXuW+X?cAtPI3~%ydkZ;6h(}&}yd+pk>LuavtE86Tv}^%R#~%1OsFYs!FrR-~K1r%C z6y00b!7-}UN>a$T+N8!V)f%whg~a^8PDd*j5icnZ(@xf`*hQcpa^`FnU-Q#ZT9mya zHChm*OmnLs*}U6$wh3|95l!`zH(cw^uq8+;zpVtjv_5=<+(3fI1S#gzV&vQW&C58# zN@rE==I&d|%+viF&jr0n!zBg{jZe&ubZwut59C_&I^VH9O1=GZfi^@_>gT5>d9_e_ zDRP!x>lxhEj>PNfvk>+u)>Rk7g6&j8C`l%1LGIQNiCb@sBdT(fg!jrHjgw3>8>NO? zf9t;$=GT;zYM16rv+Udon3}OtyS=Jg;b5t(8mOcs#@vh8`Vr2(s@TE3Z`FULW7SiH ze2eO-7rtzSW0dayL_ky{VeP8jl&o{y+>(&FNc>jBkGp)4b(!1N=z<#lJRO=EVH;}A zgiwxB_Nag}A=>kP*{xbUHGc`z-)E;2!!x>m%~C9|!)p6+XE$pQN&60158r1dWcWJjnUO4u=!Zzj58Xr4K8QA)K61m_-xu8K zcqz+su?;3>lP>}d3XuP=Tr*2eie%rX$ zxhXwknxzIjM%dx)SVJ0a+hZ2iN|U`AR^Y`MAP9?lGyx%o+0OD`Qb!cUzK1{FYV#SFTL8+RvQk|f8KMI{vj!p zCP^h@GRR9=+ytjI=GA(Ck&8j-@+FsunmXk`o9A9U^+ho6*=@My{}R>$_&&gsujyOkK6LfY;`X;p0YXXV86n5kbKI`tfC&3ifqnl5e>Y+Tr`1R zQzWlo*5-e?tdBb_Yt$8cc2ggg7ts%#V~Y#goJ8%Ef1k`jyaFiLWpmrP!VK943|c|| z3P+Kt7Y{vgqpA-0Rc))v{BaFS#m)d5G7`JzUzxY0F%oLnMdgd58+}7vUK*IYPGi3o zlE!H@V*WllFU91sy59A37pd>9l7%8R*W!|%nz;6*vRwAkOm<0IJU$k8tD%{6mMG2D zG-f~4wyq@Q*XDQi)%jlAK@(_=8nW17MP>sDqsOOGznyoOEKz52Y;@4A@#jV>D=*(7 z1m5*;(r-#9y+E3&P@kcfkaDUO^ayH)makUW2JNoKViU|SrD<_7qOP!TJJYW)96C_l zR11@J+xvSIXXwXlj~1>3_VfnMr_di|6j~%E?S*ZrEN#0=Zu4zN-XzX;8N6GEuWH?A z_H3sAr7Tzwqg!qHuma8%Twqy7U$pD)lNf!ts%3;-TWPpDRSlN$9$lWP2I2Wc#3p^! 
zLB6mIp9PBxkA5Hj6tS zpo^o{z|pWhqUbu)!BMn%oKQs(lyF3QI>H7nQRzRER%k=)h)~^ld}^8ISsCb+f+T<)-BiPP{K0>fVC`ryO~qIgfp8QPBZthbnAOB z-h4B{jPiKyrjr; z4JJfj=4{wG%9u3F(UksywBAHby6Zp0>p!X>nIH)Xl)KP0x6r)jF7mPaCLl?C08}w5&+;Z`=-u;>StG)y&qL(6-iwx~44F zW;s#Lq&Vv-q7+Zzd%Th%+^nqCs6IeX6uPct9U8k$gsv88hdD^}NUZTh`L6}ZA~)W1 zZhaRl)e81Id}>xkU=bbh9r@~sTt8`v7rYp#1y^YCOBmA>RFudnNUVMj@3 z;rpQPMFD<# z#K)qseum7Z;na#5Q@72!SJt+vF~@C9@kYU7+qdu1UW`#cRe9lEFrkJE4kjI?zSK;= zc_49n;POSP!Qq7;y`!gu4W19D23~-|z{!zs-G0-&W z%|3NEb(!5CnKETSnd{QE>GvrUnF>JDw8Cyy@^(mi+Vl8yy|P3_?q){ZFOfgdP8m{C zTjGh+NwGR5-l4+Iw53*qfuYu-Spy?>fVUrzv*JIvW-*5JsdWM-OW zJ1J$#^KMy~?ite#mxsH%iV@>}{G;<`8rS5^#|{+BA+14ewNux2CY+mb55m0QCy71* z&`SyhfgkkQ1HSv&iyQ)eZy?CqE&BfX$NwE4ih**Q@x&m3+#>(UEBv?c`N8_cL15=# z5jjQKWe4r4+4~KUiB+A4EY6W1IFlayP1y;YI-MUHy3s_&GJvWgo zOGS^;P9IC(pGl!>y;=Rq`V1Rq{)*TD=*}FTVggIpGB-#;v94V# z6aRwPhiYq@ZIa=SONh%|@`<;gUpXIwYRn>dTk>(GS;QdEsL?`SPB>jS54y?3?i8Tz zo!gC2mCjp=GFpYqzU3&fXpsF|db|5--+1m`lb{P6_klcsm>4zCFP>QAug_$LkA{LC zIj)qx4^qkPE8FYk+AIs2^`h|E*tx301!v2cTcp|`1pv!?S8djq7thfY)YZ3yR*^X) z?lU(bcHM}?YBdb=aXW-`Fy5k&x6zWU_0vkrb0NX2)aRG4@7bk9NzZUl8wg1YP=v8j=Re$=}(b&wpOaK4PS zwSJKLTJVwUSd%ABt^!p(ODfmSC3lu+B!+#xn7!6ixU*3{W~cxX1s`tt;EZdOS7g=d zlNGzKsYfz#rRZLWF(|%zN z?_kU8{YdX;2wM#V7C%J#O}yad+I?)k6lx>c8M?}Rwm!it5|+zQKEH&z&u44TWTC>p z3;9MG^YxSXZPvN>I0y_5Cq2i>AfN9ost-h5MZuADSe`TJ7{z0Astcx5;;r&9PBf#yHYg zAj2v&n0wsRf3B9A#J#@ z`eosw9e$63-fn?1EmHI>@MOx~^*tT1Z+B0QC$rlPbYDVUkFS{g*(A3jc}un`VR*en z*I#6)z6Gx!Ns~~xxLX(A|IgYK$MTXcF9p4kI30m+_0C}<8}?~*blCFm&?Xf;b`Ap* zU=(?v?jCSGF`@1o1%KJc94p|@g0h`KGdj_&YN+kUqdA3!x`f8!=XF#WBhStUQYgPx8$;GR#oTFW`(Hdqnnl$+Y`Y# zof&Sn=%6`T%V+%Q@vKDO)@R4Yf#l-Ky3!w@Mbm{H>e%V4^0@9FDONRU5B4J(TxE`f zZc}SQ`@ZY_Pu8M&6N#~PzF9?;DuAq%Pd;1N8PByE zw#KRsEe)SFAL+CiHP1H&ue6MS97iiSeO}r2uH8*Y+_L?L@E`LVLS2EXvC8yxN9FOE zDz!?tYyGmF{arH=s!*F>&)n<67oIr!KrF>zOg0}fAiX*~*!liOAPTSOA6Am!&55`+ zQE?HJ_CKmRJpu0%p$8F)xB>;?jH+y?^qCmKj0PKA%l~OWpr4sQuv`}1%~FZ)Bz%&Z zr85Qnxfc4RKD@bg0W{Nd6)PJY@YE3?4NS;D-}PP@l`JHt#d=>jUm}(>vFPpPnp`vZ zr%FyHB;1l6wDY@5Abh(R=1kF8A)f~HeI|Z6PIk`f3sL%NhBX!CZvF);R@|{=YHOMq zgV_@pU@JN-7}lrQ_PP>zE$+oEIjLyK29|RL7_JSiR`Q=N#k|+rR4c~K(Ch=(B=E9> zD-yPzcV3Pc_L5S8G=-{A*~{XsX+!nRkdXK$B(hnZBtKCo*ZzsGSM`H|L4QG|@)eH4 zwK$B&r*)FKy{g3v&d|_iLX~D=|6Q>bWW9LV!XR8v-`C)7K_wq`0lqq4BR*5?=2!+y zqf~veh~Ahl^YpC4U44a{ec!H(MC}d9rSVgod#Y@|L)j%+jU!>H{h!8)8oi|}SJ-bp z@GA0Gkh{JT8o}OydzUkM*DWQZroB0-%|2Nlm}~mQ4-)I+R$L%apO*J^5g+j+!?s*& ziN)$(cI}qHy;PL!|R^}w; zKAT_DkuMe`F7c|%W^3t=`L6BZ1#zBS$9e}*82s%ir*8a#Fs3>iE!WB36B#IAyfryj zYBa-B!z-V9^W75glVX4R2(V1nJ`Jy}gebAi$n!e5kMuIPF8Gr)fD3}LfmT)qobSL^ zu3;Y=iedNXZx!V!BrH`p;4pl)9n`jF$E(Ww&hd00EQ_hvzuC1P@&htx%$PMg{xU9^ z%GrjvJ1^I68e|mN4_QcgIK_$Or1<7vHiVGV9WM#F^Z>B!pCN7Mb*4|10j56aGe(gs z)@klZkk!A-j9^5z(xwoBeO4DQZZAVFuhx6vhqK-1_Reg4|B!(;7Qlkg?mV@^%Wh#9 z{>Kk}U@sb~L9cI>)KC2p*?eT8ox4l2SuZrv2oMD&>L5V;S5*UHPma*6D-rHtJBGNG z$2sJF^)p9PXzjSKkyY3OkuD)$&IKv6vcW-_xy^R#QGurPvo^3imVwq^?E^%XRX+w- z(nuo=7$-nv{R4VB(`28afr#rIP&|1iewyz9v6GQGwZCre+>h}m^QNgY>xCbCWp+ly z~Sgr~t#734L3MXXu%S zO}>Dt?KYPr?$$0X&K3U!nai5U z(8jCjfEXO(K18vXUv$YO9(WlC;Ned#53JJMy*Oi2D|{zRE}BQmcqQl2k|z&^^(`1H zb5I^>Id|7n29x~v*`CV`;^ph<+M7^w%7Di!5dWsR((?6{%n3Am{B&fB;X*~lHI23* z{~BgQ1*WmJLdP@lVaPS48R)~KfXd-ecxrUt{!F^KlHK{dnv?-$#uz9j@rqSBWUObw zyB-a*KJHmp?8RcAu>n>suk$z)fNRlztgSyoH@J=kr~nHZzZUHokT3lxqz;WNRZ9uC z<=Qy|6FcgA-~jfV}1>Bd#PIgFfmyCR*~_beDTG7*SHps*1H*m-zj@;E4SV) zIm^jva22*^khYXasO&sGUIaH(PL>>tp6HW4Tv6V49Tm=B#H4ug-s;1;ZNk*K)~u$) zXXu)a&WwX(*Ba?K^{BDyuk%-3e42nE)B~sg@L*rN=yz03SXsCW+x2Q$b7a-DZ+%YU z9&bCny$~Hjc-|V|G(v5EAX3zJZoG}_(jN~EY@W;Ca{UFJ3&|F;&s2;)^s_5SVGiOD 
z?~~OT;QE(KEa%o=+83PXI?82};U6*@eN^Uwc)g$_;r$c~xfoX>l9!Wf>)cHgg|>go z%{3cjcE4&AJKvQ55)z{73#ID$a=0-Ks=L+{J^E8?R*?#E4G5J3mck>(injTvyzRH@ z))4_uaC=RS(I&68ssN4p6O0+r*C3HDm}C=s{Dw$uOW)oxEQ%1MKuJKzb{GyKDjM~J zeQp0TfB2_OCz8Pg)$S|o_2)Dki`h9ACOF4ai0Ior;QS_#q!nY*{}e06`tB3j(HodrBT`m3J2w6y~IQBOd=Bed%Hm#+bR zd60Ovp)by9@nNbo3E9lqIX0g~(5*(FCtKyoJbZ9y@579sN(dVoIN~L3hO#0DgeHs* z`~+GuL8OyiukJ-d=zX`#W8V|^m3|t`{%dUS*ufIMcs2g~!BB2x`{zM7aIXlgm{XOs-q>dHxO6eKH&JqIrQvkS>adZfPVnH}RdFAU z)m8G7@^ZQOKDovlJQ1SK-=4;T#vekJY}St`B0gI>guF&Mx5z0OTc<_0tagV6jS!pb zsFy#l5ZoT!U;-ZV55}Ky78W;`mK)&%!d+zr-x#4Uk2F?K^S91=u-qi3^uJV$+w^%e zOV?HI0MT8|E~)?YhbFf1#G6CqUwAHFd_Tiq(mm#yJ1k5AoiIi6W44oRug4Eh5}&)9 z485(FKQHNGc}8Y1Y_w*^)XZFfOF3DpvYU1AYJ9t`^T`8M40ckYu-KXq{v9ACL5Ee9 zkE9FJnOSY%ZUI2!)|>?6`J}J?#<(S^SEz_BaM^mL&8%_jFB@6Z9W4dxM}uQT=Z_S^fX$0{a&^@K}EA=sE;aU$Vwr58r#k=nj9W{i^2WH6dSv*I<3X zv@+G{7QfS;3%aSoJIjaBW&zCN-{O?hSGjPf!?qXDryLWEA$&)DXKgYV8Hoe!en=>g z4_Fa0^nj^lR7UAdIyy@$oiC%S{;Q~1j1FM5z5f!BTD<)wAvDE%SAC+(vpc7IyUXUb zRBA1z;j}@&p8BD zbtlc4j}o{%dfhm(>b4QwE_O|S--(bTxS1U#x~0*-k`ac8VQHUU;&4F8DeOu%r-GBy z=!hO4KJ_N@K32aHRT)^%8^o7!VrIs$$tu>q|GWq;*+KivZVKc+8hLxuCo9V*%V2Ra zwsKISz=F#fIJU}5@&+}E=esmp=Ye2j54XtgVFC08DvhG-29I%$>{qyU*$a5@ee2ao ztl^2z(=9A*X^`-7FtfE5PfSI=1s;NwN~0R0fTy#jlb@@JCf7uk4#t(%x0{+2Ik-++ zK2zYe0@QjNoA0kz(n!&9F9iq&`9!h2c(Wq zVL11;Ar)Ul1Bb--=HH3$2a6yD`7hn9jq3IBu*iNFNr4d5n1sAM|4Cz%hVC05`fiFa z-~K#w=ZOQ4#Wb@PiuFuPK{aEWut{&{qTWbcmF6$G&FIUWFyTR5fPuoLfD0HvG1c{J zpeF#QWCI{)+l;qDj6d3-`@1jVSLV`BTpqPpNYbgCNfnZCMHAO+$gegD6|@s&3P3f9 zPS=z+EY!vrmm0SNpK!bx&8L_bb(`H641KD7Q{hKoM%0=R7zukj#faT=7>UIttL?u1 zY;4olVEf~)HP1u|bvXs6HdFL&B0C%NvJH5&Aa9RC`x>q@Ht3hA9Us+0CZeG4;42tf z8N~}Ud3b%&0^QH(1f?;n?|z3j3GYCD9bgIJqjs#coNtm=RF2U4+5TZ^nOfD;QM`a} zb=`Ty>hanLi;&|_?u7GPg&_$T3L;)52fJpJ3O;xM#vR*-%MTR_FkjbB4xfv zwt#liU!647$QXvAuOpj7#CHDc>eQOEtV&mEfUsFnESsa*{zpeElo}T%hq2KVs+w?+pizYqqCl2)kN0WBYITJ0|Nvn>0O7?i;0PqJjS~v&nd}yr+6|H@ZisUEgEZ|cN zrGlDLei^cx-CtS0Lqgwrrm*6Op)=3IaGY>=D8U7EW`ny5%Ux-Lc!+9A#rGxMOh&qZ zdb`D^cWmVd*j{#jvq)a&5)i6qbHym)AASJADkzDg#s;r?mB- zex=&JP{)^96*rKeiT4G*ckIzYV&=L~5udy4x}lmY-k4K!uC!|dPeloOj=g~asFXg* zmBMd$3zek7`e}lI8 zDF&~#ZU4@P+ec=^y^(q7ADOq^kDZZ_^BWxA_v z7}IA8zOtzNA+H;K7l$-lm=ML{ojsmGf@`(h>+kna81nPb!4R5+~2i4ng z<|R6Vd_KM&p|uh5!I?C}rhN?L<=5@s2TlU0H~djchd*dPrfdERh;37Te(SXhQB+q~ z(Yi#*vH1v*GbUa>Nm*$SI&q?Zuce=v(?ziKhP?cw`CKrmosEUV)h%6S4<3rdP88rJ zl-}M{PI=T;W{Rve1Mc)j%7S=Tq>JsMbB$EDGlUVS^a_O~2GvinifHk?iSus!Oh3=| zRj8#jf&<(Y^1$zR=kj5)Ypio*yyXsLchp=_x4QhT6a4KYmz3VDzU?IjxCffCj?Gl= z`L&L@Br?@>k=!nAnsKWh9OEeY%Rzj`nG3*-4P5ioh2Pf{N&#pzk_BinG<-)-yVVp! ze#&3vg;%Wk)4|v7P5)ga;BW~1&e1iY;yl+c0DiD%&&@l#YvggGzCFh%{tdAawaWs3 zwp;Gq70CRqOaJ$We@N^96<~$&_XMr*%+2hn#ex_*Jq_Y8i&AIJ=T zd*lA{-X*p${;L&mf8lasC#zr=*F3asodDV`(jWn(nnanFx{1wHjgb0+S-bw0Mx2qRge#56T-SPxRN~B*m0hJsq5Jn!XKUhOG z)P7{8gCFm0C>1y5VeDdH1;0ZQW5Dq^d0YWUT;0<1FN@l^>0SpAXcvB2Y`Ta9$p3w{ zSizqsBEH&8PF9BobX`W}QskA=VsHBy2{aYwEi?j8`09(mTbiX{WZGTjM0Z2Dzeh`$ ziOM2G+RAG+Yph6E0^p!x=ZQ=aS#Qp^|^&+CHNo(mI&Ph0=-> zyCO51{Q9jdaW1wQ!0K+G*q*8%8J`d}=i#~W=rZkr-p^S~XTRRAuh+flUG_ovV@7%y z+sk9I{s?&!5@vyok0f+>){M&hAqDVm=EpM=muaBPyYv+?HkI-WO_eEv)=EnrN!k3i z!M$^5FT(yE&7R~L=ZfQ?oRpBXZBN(op=5snmfE+vqhDpkbq%=l!%GUtT0q>FJ<#piq z)F0CoBWw)eh)=w&?NlyH+}X_UKK`>x#yQ4CHcOEHXX6t)K=OpNbiI_$BNr7Ap+FI? 
zk>e|Wqia=$G%y!vGtS{JZor%=YZvOby}0y1wYbUO3#zVQ$99hTG+6^6n5Z!Uy$WYuK}{-yO$i@^RPW72)A zEn$S`7wgfJh+|(ryD}jHCU*YNpZj--FGQKX?T<3QNi40ine=755CYzu=L^5IqA1CD zv)A(azYKKrL6Ha)#UmN}Q-y2XZq#I3#M;wRX+=5!zB8Fm)Pfm)FPndpxAy&4aCiQp zg8koeHPlDN<<>(RGW5wJG~ zpyxfvBmWdm=I`C@XvN18RYA02`%sow(O68g$pJ8wkcGe#+eV$GnDXzxQx>Ga)xHr+ zJ!ljuBkXljyk^N7E3rrP>s;27dfJNGOJHV~bFIGonyFDuo$;W(kd2lW>I#|GIZ1L9_+U8I5c3* zO1{F1pDwZkG$9*LA3fEfE74F=gf4#kUd~EjF&<`<-sXobpFwL6BX_A(isxxM(7d&` z42vmpxMcufRuz#I8)j*yA%Z7PP+iRy!@UHn5({*A-8BIl85q?eXWphB)#+OIOuH>+ z6;;`0d&wy9kv0=^vHEz=eFnH_IeLW_uLip7V7hWE?GVoDs-hH)u|s8TJN9pn;`$v& z1mxunH?kmv;wq86$c3YpUCORIhRQ>Z{!CMUbqmcYZOu9>TsdA)-?_bXeul0NKPh{K z!##DicI$WF#5ybfQW^jo*`{O9C`unK>Qa2yIO~lOmK^MrJlq5v`bxwyQXzR&5RS2& zwUe^2FlQ~)@kgHhMcRzEC;uca5mGo`xXEJU$rD(a%l&|-)tT0&zI9VuK=~^6qgsw3 zpj(-@)AZCGXAr2O*z0xPW9z}a!<;&1`R(K?(OimLOxYUPJonE19jAlrhpLW zi!1~bM$2K!>Ewa569j~<_9g|2Otk;=z97*Z4P&x}vCgpBUlz$J#sTx_K5eNi;Ceq~ zAr=N&AC8!4&K*IcZ$iSs-WUmvaS8;63?5jlHnV*(Z-@V=QpQ$>ktqx#9ovjl@@;iJ zLJl9oyNmG49pQ{rEZ}0iy1$3;?Q%RmG@Tvq#jX18Sg7eHHXk5o2Xm1U-w(GIIW)G) z4t;UA0JWcq({&JNDrk5ioM9~k)f6kyVYs-}9R>N3b@Gke!uYrdMt38AC1^HqfrZpA zP=~lE9~Nb*(8b2-v)I!?U_Xwy7J3ufR)3*i1821CP zkF9Q2QE8@;_kxcOrNhSn95}j)18=6t|HSRLzkyYxW!D2po_R&Ls^#z3T zqm`)F7QapABJ?KuWK+AjL6dq$5ijA=3d)r^=D9*t~$t<_f1F<7+WoQ<{7gu zqU&XOVZwQpt!xT}GN)xL`B&YC8HgCcl@JVcjEX_d z>fyjTKB*U~M;I22(gL0AfC-|~JJ_h9Db@5!Y|-X>cEPO1>Zw#j8fF>*WX0Ykr{bMr z)|TzT@FF*>Lk%8!cMM!lYz zf#a`_ut1`=DM^{=_cFebIj0myZG;I@oB2m~`s=ZNQD3_iZ^<1Q$CSo~$O+ec!?IpD z*7eCMdkjEBA7gI)jbnj8DX7veL>ZJMy0sqVG)>&AF(fJfy5q#Yb{c*-A&rnV6UjyZt}=xiyV4$@Du_m2Fc(;bGW zlIk~}KMvV0_X&QUp7r|N>!1ATH9mF+HXs^V0+5nFG0s7%<}qgFB}I3I&*w>KqtsPh zp1T~s`X*ZCri@DMrOd-FXI@kvvpp}fPt^9uBWctqBF)WqvUU7oVy4lQH}Va$@g%x~ zo{5=R4oH(VGoF&Ee4-9I3{|(;7mf7fhB+uYKP_rZrxuo{SQi>)UO;YOTR&TkY&QNJ zYsCLJ`reiYU(d*1yy4RT!dHPD^~Ib=J%~5+4bQ6}I6Lmu5t%i4oUF>F;Hu5Vd?sKl z8+C_r@H@Pxhr^h>d8gu9!nrCl2M*=V7fTvpX(NixBgV~vs(j#r_Qyv6tq@o6@I_>m z$6T=IyMmXdl#Uk)d%SO5Fz=N;=EZ%!a$7M@d$@C&;0fP)!{ef;@K|kZ-$K1L`0ZQ~ zchH~fJnH4?^kK~eIBo49_|H8z?sb9=GtbfNFk3<*!2|!Wa%pI%ZPT8cs*LjvfQF23 z@qS}`Yd3v7c3|g1JMr(HKmXXdC-8Oy|DN9Xf9F?FjkE>lxm8r4f32{2Rlsze*dFI7 zuuPnY@8q!JJRWl#6;K-cs5lrF-b=q1g5*un#1xAt;}gqCW<8zV*{K26N}iQm`d&k# z;ESk%HMiIN8TJ&8<<0=YrdPv+u3ytu;RvC)9TT=Qg&&hub_Uo!A<@$Km8-2Sq@q>Y z)*S0Eia*O5+m;jii(@%TeJWOWl+3m4_6yGw%dRvd&-CD6pbKuOQ>LVlVaola|0!dW zj`x00!AZKbI}htdst`Xm%$N2te-Pv7c0%XgW0zzx6gkN zYvvepxfHXW;Ixd*sC8A$8eczp)RMjDN1K-Om=GDcMu7+y zjbhB_w)%t~_hY3!)iN4_X8i&SSkl7tr}$vQDaL_wq7JFKs@5_+Wo@SQ10K$HTs_}r zJmD_N@oQu4(!;~W+ggIXXyRZ~R`ny`TWCU|1l4wJf+7zq$ z0;4)(nj|$>gfT~tnawG8erwYRvzpb1sl{`xJn9#`ws;gVM7`!CpXC6H7@AuNka|D9 zaNMKVs}X+-f?)}a)sx^hL{&J!DDR3BX=x)lucYdss0vz@*IUlIzZMEC z%zGstadj7t`lyQ(O}T$HQ}%rHzKJAbGG$UiA9(&~EQa0QtxvrFDmOM`plZs!{4wNo zKYUT=g6w*oR4gqN+0FZ6%oD z8}N4wUt^7ovKb<<(K#>GxF$Q>GK>^Q1R+P{`z4#5E{Nl6Jjd3on~B zOvj0`B1AM5+ApW+I1HM}NXcf%meJMjOPP!bzARek?(?>79PE!^U-zDYzn(huTzi=y zugStgX|**kQcN?)zdr9=VOFncx8>$+;1-`c0uK~Dj0p2**CBqpFGG%v3(M^e_f(4L zGb+)kkus}tNJ~<2!3Jetvq!;XUz;#!~MDcFra=`Vt<5di*l--gNtcK@wUB) z&`Y%H*T4=TN`Xv%4W9cbAq}qrA(cyP$c}i?_YKzxL)GNd)Eo*1m=2`*aB(4freUYW zkORJ*l6=l-?Gd`OgLaY`gW<*ezsW356M0&`}i`rj9s7+DisHh`|UPsAM z@M?BTJZ7ahg>MlVIJtBUk>^tR*X zJb1cAu8Ln3_s(@>7+eq)YJmVL(zpR)g>&en=v-UK_=1{KnY(EwN#{sz8CCm^VGHF)G~TYq;Z?{jde$3l82;q;UiBIg4cM3 z!=*&Md3%Ff@mH!xS*OAt@T4R>#?qa(H(!u43K$QdFHow5vF;N;d9v2FsC#rMD~`@) zdUc#Y^~$ie+Yn8CEpmq3>Y+Q`)BYJ2KSeY+OLm(6>ucfXmjA)tn};=(?c1VN+ba8} zEE@y_3f*FZNFkkcp-L1~1OybMR}mrvNYMlcA)uvz(p%CuiHb-Y5JCu$Koo@_O(IP~ z6GD_OKoTNE5+H=U6~w*Ix#!&X&i&4P-*>-z4u9DIS!>R@<{ZEA8)Gbv3}XIcy)DEL zwIePq2gi0$(ZsJwa4wV_OX{x24Dwtbeq711rG_UO6|WB 
zy~|_5@4WcT>|vrHrQSK8a{Ij0*y&Q+ZYl4O1^g_?2UpNN~Pr{lSjU%J&6t~^P#QV4J z^={Zl0V5MAE^O?LHWyq+^5aKk$RRAo)}>adXQ?U;Cv7=9vcfqvoZC&UHRKGiu=>vg zVwU~i?JAX+hv|*w+u`)6O0+#z^3qw<@K@l1PDZkQneXui$vr!ZGPS>d|OLWet6Pv4O}lU9_1y1_WJSr@P(4XKYAwuD3z z@+E_+{K2$_+&n7HZ^MILU*2LTF$re({@kVajKGD^G-VYSeekISiGr0&l~h1Zkpf?@ z9}4807vU+XvWkuvIX_BN~qBp03?gZ7_E^=)LUZNQPv+9>f_Nc%6vUol9MHkRATZI}R} zE_iFN$h`YY!P;Q~{3L)>ep>skuLSevAJ%@+x8~T|6#@G#u<(BSdcMg1{D1wAIF5}t ziCD{(l*){WeE(<>Q|`NyC+sSda`lTTP+7ot@%QT+O!hBw7bUFtKEFB- z_u<%DwEao>YXsH-;9IY3_yz<4_0=IaUD@nJjx_=MKM3=Hb>Oakg3Lg4SHWU^MFp9S zNC|U@Iq+HK&%%Ko!{TSre0zKH(~2Z!_fm9&CyYUZhn_2_+L9DyR7}~`p9Y1bSn&MK z<1}abOz|g1!z6`2>p(3mjsTsAtgRFKgqrG0 zjY?PPm==zHe-A<0_+v#&qa45T6i-$eKzFRK!i+uhs~irxz1At9jf2YSeo|6w&C2f{ z7UX$L$C7o?wllWQ*?tb$UlQ#kgmyfP+@&{IgpK%&*7p6Js2_43Chg9v)~wD!gxD03 z{yzSF2d)5m_C2tJB)eUOT4V}!u6J-4uuqdOS~V9+dY!!wIgARG7Wl9Y4&$jNsKux& z7P}7GS&H=IkJu~tV!~Scqn|&E=XnCKuIWNs!oV}lHB`s=;-2%N7-w}l)Y_=iP?*?c zq@a?(iG*eviu9ZrG|p$o`BufAyXB~B7S1pyDxXNProRz8&K_%M93q%*)XVpD<}_oV0^=go2nmWD?VK4Wz`sXBrXM#4>myz(oP*s zJwHRM2}9=?Gq_7D$$w?$IR$lrG3bt34_>0?f%zF49A-C$<_kI>1ZP}}B?`pG*iT%q zzy-biwN>S&6%cxaM0k=obJqQIlr6%zzk0S;kzV~fn2u^(eQ204M#n)}r8tzV&iEYq z?KX2#*>ZE-3H7aTtb+du4WqeW^Q9tt--U9prg#jsGly!WVW@SG=0nwUjP5!P+_pr} zpHdE76EviC3;cEUfNJGWKnRARp>fGT;S*tJ-E-<#=x2t#D>RAj&nSAUM27kPQVN;7 zctkVWZaC_Twtl-Dx3R7*k%%-jMfJpNnCR%wdFrr#UM<8I(XzMzLVk=2)hPwP_+#aS z8A|b&gc+;tiDh8y0@moTrhLN8RgtPLv{Vzp-7j0nyS{Cws?>LeduE8Ndydxw=w*rF z!huU?-G$FK#&VtXS?lRHP@gU@L3@!^TW~FP zC8X1hy1?A7J%86LM`gaYOtk!>m6YN;Wp_kGm>9MIKN$;js}Hd)YGA-ZJ`ANO&FAqd zkuxA~mse9V9M{%a%W!Ej?$N5g(aTt~yNlG-gGrYuJVd^XRR zT*|1(aAzYHf5u}t%ADSvfCTHi?})JVEeL?d(j*4DQ43o25S${NP7O2Pg~}y+_eRXu z^`tfFaq3bilODIry|59#!YxZ!<&pta>R?nr53;lNF28LPR?y zfo(QI0O}QW5v0b?q;eL0x*hxy){O0p3Zd_WQ-n(_a=d5joA(CFm>)CVYmT4irs_Vk zSE+}VCJ~D$9mmB-ex6Kv^|-cJ(ikM>3@@-uE7ZMXQHt}2)Ly}BQ~YLvygzFk0HEIx zy|}R}rj|KWU2ogPJRHw-Jv1&WLWVn=0OVH^nv~PEFuY=}Axt!2BZNP{J-ToYRsq`) z7ZO#zV$4kiy9acCYAUG>z+1b8oqe(7Dv~<&M`)K)tgJh=`-6_$90gn%0F(%4tXDFV zJK&*t!5dIj*WYQopKpV_Qn}v+Ks%8Y&WfCN$%u6i+;qR}Q|Qo=d!u)lr0IB9wtPfD z8)Wfjt)Doy0zughI;;HZWRxnkGZr$lXs#tV;B%<(c97j8jvUP{+#Yf}Ga{Bbq4t59 zIMa6*2LPIEURY$P*%rv~$ZDu-qtb--r%4w~66U@FaL~xmS`^JI-z;P<75r!o@a-9M z9kW$JlJ*WBGP0S;QQEBmV&9A-La8PFKQuMtcA~!+8xnrkm>lX1+Or=cP1vDRe`|t`(i4yg|x{h54@8`8yh)eKFk)T}eZA^k|{eZ60E%dZ0t)5Rm%1)OdpR zxxx8y{KATd_oqr3sw)oz|8(lU(){puKEkRGxQbH7Q;lmLG6eMJ!>?HDS5m4YW+Il@ z!n1==wO|K7OPfk!H{{QNdY*4YYq{7b7)BVvJ_tfxzI)RPP<1_>%aC8S0&sM_sOQ^& z(%qDR7-^{dOJDYo{W84%kd@^F2YN=E18nCz-2i2_Zps_A**jnS2zsk6+8=lB8tz{F zjlh>7gXV95FRWI=)bF|W){ld?0sbAhy!fw|cSfv%%lFVo7;J@p?N|RG|6i1O`;QTi zLlZYG{m%4MbIE=6U6>3jhAVyBeIFR_VXw4GelQu|VcdBF-85me@c}J^{`-#n zfd2i|>d9ye8f%7<%vdMDn$9<7j>fl)HU!mLs`RDP^=CjU=Zc2so{j9s00q1Z2v4+%7=w{?&haB;=$)yar#qpixM4}4GpCEy5ATUjN-UPPh0^v3+2F^X zi}A!FNuwNdVH0g>)jf`URUkY=`BUcG)Hv?jhd)e(yY_Efu8HoQ z83lRJB*4D>n)3~+0wpMOk9NfTR7QhIM-%E2#CtSLP|wcrR<@bu)M{CoQjXP#s+CKLb}2nM2$a2Wx?FjMeR;0RC74*xhEI50XMj+)&x>BM z+zgTZh9t0)7uty(B1#P1FWmItuF0N?Ta&KhiHj}Ep$2F?fU^yr4agWVLf>GZtAtgQ zMhx)Z6G%_t>Tq6!@VJ2xqGe;Inoa~0%W6|~t1~EhC%SJK@iEo8fmGG6;tObOS=IeR zlMJIv$!;~Z(Ej8IEGt53wcXI#xYQkj&S&0l@aB20a6teFd#5$YxZwtFS@opDl5Kp8 zyPU|wadV4GElnmx1u=r5k)m!!A;cs?82d9_GR>Uz6P%bm3ao&U?dx#D9o&WmH(%?_ zvwR_F2OSInMbBLsjC2^h>|hhsRMf{j-eufz9C%gI%lA&CPnK9~>ud{q`l1OikS7Gg zSk_CeuF3g!liY5}KuWv~Wr$;%eWwa-Asxf9U>n(Wb4; zv3CUNJO7~61kJLYw?oVch!zkF>$QUgofh{MDu+pp<;LSj zNTwCAn7M0ZJg-NUJK+^DTyS2Qn==HtDiC37WC6?Wn(JfWt2sWQt3T*)(1W&@*0~Kb zXtHP2Ic@oH<#6T5Yx+yEjR2L}|9sC|+x*bC=)i^ky!jN%lpYGArZ&tudnIp{Anl%Y 
z%cwR}`7A68-RM*)zV37Hk>jH`W^}4JB+9sO+opDMe^f-lEfSp33%9xt5Zx;TU3D@ZQLfgnvc7l%VpqV?^5azS`nOKBQzb+>1f@*R);qYE*_S z822^D>}!p7U4znw_~4RIcW> zuyg=HfiPlkJ@@X!(@S~YP3qh|;e=~)C>fb{vd%6X(w}PMnh=thCM2xx*}=@a8f09d zo_T8KY8WATXGR=w(V{n7I9N$Oyc@oib<^=>yl42JSOpY7eXg#-DzvX^Ig6^NZ^<8W zTI5!%Xe5_1mzSt4>by*N6ps9MC^e?)7m4>_KVJI8%Rnu@!^IMSVfMy6QIFg-Qm%Y? z#BK&Z|A(VIz|`GB*7n;_gQY>!XI?Y>Ry*ryGb^>HjjzQvM2Ncw0g8=IWzY~vy8?8* zX7C?_DY*?|=Dd^Z`zny`wPeko=N)k7=?6*0CNWgY;>iPETd`9_mL=GSY=0dV}v;*t&1h93l6CDHmDT}MgPkDhcbkQDpD+sd}XG=v4%K@ZAxQ*`%zHb9LA zc_e?T26OAhyw&3ck>1WnPmtxdz{}LWa(T_*UlO_enA{Ib68`a@OZt;%cONN_rgXgR zeIYp=>n-joEyGnE_QmXRv?;)*sO{+nsJ-c?M%-*cutKB1|JX*%L->9FmFGeN)H{h7 z?dsUctp^5Qa?$_f16>hf>=u&1)TcF&tz>i`A=yHgd<@^QGxgu$^$5=3v-xc2!?_LI z?7IbMe;6J9qezxt3f;JVlvHYuV;YPLTtQ`nD#<0!O3rVv52mx1SdRhRIERzG95RQo ztXw#^b(kFP*n=&w`Wibo%H^#5OqCOY>Mu(09g{n1HNACbnx<=?LEEDOTZpM5ze~&* zes?e_PE-6x8`2(w7|!%L5=!8F2W{NpE1&gBI;i(^CaAj`p7N*lR3!S56+OJ!nP0oG zq=L%S4Hw=_VI`I}8ad}=?;k8geM-{~7|qd*YA@#}-Hx&Fd{W3~d}aQ?PR$SAoSO(V zDK#)J{P)N`y*trpxIISK`=1maFq7M_0L$cffO4>I73D8Pq@@VibQmgp z_9%>QsP>_;21GX2Bz>NJo5=3Dbh*${TavR;SE9q_vVJgzto+_mfNLQ`F|e&HeE)|q z8=KCsX^n8#U&e<&hxif}f?v2R>O=nkw=ER|a4#Xzg5Ff5kvn4ojl@_3{n}Ez3tj6_ znL6F&qu2f@E@BQvk5K8tv<9v1)b8rvc4}yw%h^v}avs-B^>M`oMP2!-?1FkLW&}l|gQ0f)tQ z#*Y6n?D(JSbGvl`EqU43-I&D%AaHKYb_D$OZ(mou3`YLfqEP>TVvq?R;)r2%34XOq zQ04^(GgwFwP(%Y?tA-C``gSwamY~OdQcPQrr@I*|r9mpA6y<|;Wj7;p3aZplBTOW~5oI{9J&~K0&H@38 zsq8(|rB%bJ{U%xkM^ggQy;F6SdTAx{T_)7y;t=zarukHGrK#=<`ePpxc*ZrMle2ah z^$z$ZqF^cU=Uu(+x4S^zx^CfpU-+ImQN{)5D#6TaNe#>P&R9CAlU-RRB&!hiKRJW-D!(EGwT$ABTFs)`m%hA#+@b<)zZg&*4I% zGVE6iVMMYICvqh-8`iayE4Cl57XW(e`Hz@Tf@ z_@x71rt|~$^<;HP@)=Zmf!TdIxx6oUO93 z(BhH<&#*s*>`1x=^texvW(|U^Ql1NSl~(303Zrnzf@XP=Z+-&nbnwC8RXBt+$!>1U z(bnq8;}j&qb^^C}Wpj!pi@(RziQML;6Rcla8CUe>@0`Z|wYzryP3BXWj|52l)vIj; z;53i27Ma+Ve=@?B9~xh(&@SpDPYN_l-sk?|s9@S{dNtAsAg1bj<~rF3I$KVAc}U_H z_X=!}aspElUw7J&%h8Jaq)Y{=#KNY{w|h#{`=v{fco2;oi5^z>7tP7<$ny&XiLBM_ z1Fd4bD7sFiku{<2Z3wNh?$J(?Ie0~+#l})4p&9A3cqi5a!e}^O9?7gN`yy*JfA_g{ zH;`oZz}O}a0cJ*P&PbVylJp10M%7*~)!qAkqFtUyB(itGhQRrQDxFkHqOo>@pOEd+ zFvN0L9l)4u&@9{Jkww+GB(VjVQ5Bns9eG(3CY1Rz+Vka-7b-JReixKBT}l}<4Akl2k?1Y*#UfJ{TOW+iZDmPl(Shc(+_~&GVaTE#itv;UY34m_64>a zpXh$A%}A24Am}Zb*T*^#xy|m;k6>K>AVGWuJ1L%W&neDycz%Y{nVp`2&>1&3p?YF7 zTFg+{JoY0VgEp!%`$u^K``g855g1!p!QQK$~0=+Qi_al(YCh zbUKM^E2Qsv^Ze~=-7=yRu!AV6Qg;M*EPw&h@=?vaCRlgIjXid}>l(;ee9PRCjd?=5 zA0-O5FJO1~>oKDQt`t)uZ(DoH7-b@f<4tUUXiZ@LXq@8nIIq0IMi+dYZXV-L; zVcGCD4zW5o;@8s-p(IPvE)g*}IH#8No$HXSNdN9**CCk^3J!W)q0q4dzN!HG{7^bp zI&y~g$5VUew#MIaCFhk)$onG=-U5n~eNeQU8bX?HIJM@t?E08vSaV=FRE*nlKDj0 zQEEa6FzD2a5`Rr-jh3BOr@pAJgQl<4*VBfZ2Za%X&@I$u%ThaNHrQ8d!Cj$|rJd)IaHO{MfZ@dDeYt zbp~{rd||rT)Ilz@5!bokIHF1N0)Dz!VZCAn9tkPGmEVE}x^oOqOU~2S)kYgDrTYzl z#E&hVPNw-gvIlo3wW{eX^PU=-y4p8kOq6kWd8MXed+D&V@VejaUXYRr!mI%3b{R1W zU*-e-&&Q&gB8EXx_f5_sUsGv5@olDB`T^sswdJ>yKzEFao;}`@l*0V51Q4FVWYAPy z@iVG0-E@et_-I*k!ACmOj2yzi#LHZs=izR6zaL);^sWcSWCBIxy*E9I?qwF2LG%#SU$V@ltC$62P*Rx`(}V`Pcnz;pMt#^ayaId6KrL}Au{Jka5t7B^pC_x=Ah+Q@kmcaReJ6ewJ z<1i*3l&^Q^P+9ayVe(&&1g^!kd_nF9+)Yl-HqpDzRii6e635E79!_jOAR1-NxAUYHA;#QV`s za73vmnb;$hH{Y`u4hxBANFquj0as*HI>*>!h|~T-`+#=~VqM_Sqh!M#*$)V(wSg)MLhWb=plLB0yT<+><@UV*qu zWgj~4njX6M$)o*XA|^}1Rb!OJ^E}ZHTeaCM_4n`G#o z(=~aM&u3KFjhC@Xm6B#VKwM7a9+LvEcZ z?M3-ltmfFS&S{9*l`2lAnf|P)#67@J9N%`QRwjYG%I&#`aBz5R!r(j??(KIefH8#A ze~<(76&}twN7}vbK<~w?NXkcl+!p$l+27BofEF>L{XG2(>nbbr$HW z{B&-8>)dx1^$4cc2PbW)d^KF|C_l%oN`FA)n(f$|iJMxr*Uk-SM_D<3ftWAJX*W2# zLNlGk1($~Nd+xJ(ayll{ZE^up7`-%|ML=-~@<6}5L&tO$P*N^jOo~>aeJH#%8&+7K zazl+g)(|xP+!}`GOx;l3Y{2jk*W7X#KHYSfe}501`j*_mYMT*PC+t>Y4i{^jf^HXp 
zh?XyXMF~X{`G{sP`c3)-p!H06Q#EeB%)<*Wzi>2&MD*g~!W%J!U0q-8GUw)R9x#;CJi6@JdfOH#GM1h$-y5>6 z`K$x(@yW}Rlra>SOg9w~1YLx<5!51#2bOq0%q91l`0akH0dSUb zEIQIKfrsxjKdL~Pxc8pnsr^X_^_{*EfAUQ8&t3DBWDBvJ_VpEXT(Wc<^LT}uTExj> zzKH1%zYAExQXMv^&8#=B&5KCEQFhj6JjaJpes1xp39fq;IG&Z!OE#|Pus0FKmgPqM z=q11B30tmOXbLH6-430Vb#s(+QyXXBgm)SOw_{30-5Q3K*YCZ(#rmghsfDT&CcE8> zlW?u|ERtBm?*jR>lF-y=y)kj$f@qM|lgv2}_m&&eXy zH7eh(%I5E!%COTDRw{wml*rYGwblun;>H4Pax;#Qn(8Hayy`A~gt!(L+gAO9gsW_g z`+0RlZWAv2e4B)cA9D$V*d+BA8T#wVj6pDXS(nd!>4$(=QROjMd0e>` zy=hLUD5|eU$9fjTlg9xpbREX=oMZ^$xgV$OwOQW+OzYQMV}xEn1<3eT)ak8}9qN^{ z3kBg%9M%UGIS%fSZ(Se#&BUFgXw>8-juQ-P_tieyJf&Jqt-F7Y`~a-n&NqEvBQ>G}w$}1%akR5_ zUqBeJj@JUr*&~5Sa?fVC*UcV#@QiXkfUHuT<04hujz04ZM|0>@+!#K8x<0sD5aRXmW|3%Gxxm_RS>;)>gAvnG!nOSWlc8O=Uv*o!l2YaAW zi$A(8RYg#CVHYX376@M=dURD>*tP+8C^{al4e&AyYs{{CnlO}k^HdnboDhZMY>FSd zM*RAmsMwSCaX{_(2Qpyl{L6&KHm4aSV=Q)4CPJ0wooY{1YGC?d_HGBxY^)&a-L1Rp z0AybvTgABYff+_`+>7^R=9U{FgST~Rbd_=Ahy3udKXwv1W-{+cU$9!TrBg2uU>Y`6Jo6 z82Fk_2T8JZo$qJt@v_i$011ARj?PQMdh~adG@2{+%l&~)41eHFFgpE4k#SvIjZM>p zmNHJ!0wK_s02tFAmuHJJ=^(#fOV05GgYqT9Pb#%ns`*@AGg)rDqPXe$S{V)j81svm zw#i2>+i;v9d&kP;w*{i>&uEY5lV^Ke;wJ_8ZoW&BESTD{uozw0cmo3hh6VBN@|WVt zboKj}UIVH4F7CZcFMXWv8chBaRTPM3p3^HXzUa{ND>X$@tOK~Qe%1O&#RCcELcH%1 zcufUJ@MaQVMP<}cEB|}igic-3_2ObGrxL_s6Di*mp7XfP*|RBu_{&*<*ENOU2gb0^ zy9CRV19k?)BJvcO0xa_Dm}P3x!VEZ$WVh@N=s-|rzU+azfY<7s!Y>!BD8pp~sCDw; zTmW3RAl~z*w?OnaY{LIRz5E1l6=yt6#bF)9^6fth5R;vpjqK07tGzrL>f}uRV9&uu zw*V^_Hkp?u%uy8!odbQ=rr-_PBtUlID`IkK8ws2vrXuh6V<~#M#nfe)mB$_#t|byc zOvcE*_u#$og-+JNlID%gI@qKE5sI9b$m>N)($R8QfjarcnvXwX@ z&@M?mPM^T7%>LMMt?o4;TiCxmJs(X#d!B!~@znw+qzBU71a3Y(SAQYZ zmm04)7{5O^04vcq%nyFO5*!5UIXf5mbLV?s(Wub$)J%BhBR}Jf+7S0rEt4^!CrQZS zI{?4mz$o(Kkf0K7>@v3G!`ebnx-&*Hip!*9%L1-V#LOL;aNq{KN{2U8^Nnv`6bI=l z{n(j*?)A(y*&aT#gz0Sp_g*d=x!9sx)wzoJWcjV! z*zPiXshi}IM`E<44TlRt+0{+9tyy^b;IP9e|JnXuel>k_u;|!=&qt2DAgWGDwG7*~ z+NBqLy2+LzJMo0-JfbSnjRw&F^4fsyt$wF}3{!_(COVe-@JUq3^m7P^1PwHLvKG?O zV~ZqReS{n`k<4p2XgsSM4|Vii(cczU|K zCG;lU01eS%Nxi`n0=b0l=@_zOMCj8vn&+j8q*%>O^R6*i+%qZ`yz&9ozNrYS_Dn0& zk8(BDviHwto;=4Hsd~nKW!a&saDO#+{n;v>a5%Ll1Wuc1Xwc^TgEwMcul zzfO%WsT*yH<_GY{Zr_q;5Q>zD4m8XZ?*U4|=-ppjbG#6{8_x*|iOGBsTk02n1KEgo zF_?tq%UJt$bMAf+ZY(5^a55rL{rY)I|Eb_-7nNeJk>&P zgs)9 z5cZ4t$Kei&%h$!DDFp}PqW(+AiQ_$0J2!R)`vv{Bx4u+Y`FzgJQ54HUyMc|2g1n>C zS~SQ)ama*1`BCMDlr>vwzCA zBNqTCmP049qvl-GZ@ztmwfffmUzgFlbDoInMd$lXhuo(uCUCnwHvUAvq^RB6U1(cn zFl`&xP>4D!ujA?b&G%ZA>(gV(Qg;ouni}P7Yr9*INCmnopCJP-Ft5|@D@i@%ZkV5_x%|Mc)|j&7FN;ePx0G&x5&-_h!ld0Bni-uu*GhS5M~^2z zuLoQxHx#LpBu@y4g_}+t1uK8BUI(Q+;Q2chh*6@Q_dd*V2zlw_;h`tyU~ylBF1{V{ z@`~}0A`T9j3XPR`uPA5t@<#w}f(lQ39688_4oYF7cz$u7j}NrXv8O$b0pfu;IGN{Id_|dKA?dM-~GpTOSH3u7$l~)kD2u>-U&>Ly=!Sel5oUsRC9d2EfQ97 zz$Z(2Sfc-ZJkjYxVgD?aE4lUas+?mKSHr5o+6Ya?j|OU1~qLeCa{k9FBUbQiZ{O>1bnH@ADa-Pv@b7 z8L9KRy*v$?uMMAARb#HF&{Av?(=`q1pHdLm_xtsZ7k{EuAlG&qkkI%UFy4^SRcTLA z-v!U{A23zjs`%f=vMZP=#`Dv3yVsR8*)3co`vvqdEgdnax8{kX;GJjY!?FXWN-bKya`wzofMg*^3b796U zbz|}2x{tK84HDDpY>ENS({I=R9Lss95Y?L`VAc2enOEZ(_!kM zn5$=wKU6UmdU=3O>AP)}EyY%8co@)l!_tiNj$gcK?7!e2j|@h3WRytK(b01`(Vxeq zLX@|MQikH_&*1i+qQ50j!Pwz;nBU|!&&cPwthvDi&@zCE%_T``AnTQCNhR-*$7$L! zTC`7>pW@Lk-LZ!rtZieYbR|g6G1a9V6AuCeJ!gLPj0V+=j z-fl)pZpC$qdpLABS*0Rqod7to*kr=`de<1}ETzr^RyEwRd!>L*XZUe(I)9$ zSz988FB&B34Bh&Ou=yV55OaiK6MZ_6OuIZI3;P)?e^re-oWFVcXD&1$-3udRyrlWk z20O5Ms(s&0B@gQJ+Zn?b_veOzlxFr(J9VPS0ImdL9kHoTep|Sb>%A&YCRs`?&BWPxWM9 zqLt@PJ&W zY~pK)=}O>MQt`#rExu0aVS05}t?zC;h}9y&db9G*+uRA2yVc+syhKcPA=Jmz@5AYj z8HU)^Wu6h8nX4qgU^!3hv#+uwozXd;XWKMQD}rVZP3RYJ6zS#L?K7W$Ajq2cWZZS! 
z(6j9!KFr-)1TbY0>r}@YVoQ}E6C_2zdue7YINb%>|59$VaWe$a_9af_gyoE*)Hmrv zzK-`2QskkhC$@L(qXK(y^++RC+Aw$lnhF5V2*_moqScFgnr>xkb$^^*+EzQ&x?Q#m zZJoi6|2`eq7I3;26tdHqZLqam7gG_#4&IURv#pgCQnENVQ}B2awOuCE`nmIc*#N8M zuSx@l^FpG9)IFIuOjptMD z&l&*AMihz*j(M^ujTaP90ha_k!%qmCrOu}~+9F1iZstN{WO%=J2CY^u^74XcF1+x8 zN}sfC_3}45SObjVclAb5FQU=oPxv^~WCaL*Zkm(j@sz+Ez(y16tGmA{Dz(4LU*IIg zM~yeeozGYqOAU>Lr2@@0`>tD(!~b^45*K~F_N`Z<)~iv=o1dGc>w8s?F8$eI_ExZwY&CYW1;A@ zQDxR+b@^QrBP99t9tok`xr<@vjbWV>k#}}M2(0Wy%2P@VBSH@BXXfaV$%r`lucGRr-|ND&&93A2VStP5e@pQox4W{Zl)|2}ip163ytF7gayu$$!@(9H zrYrBoO9im5yo#!3nTMUOI6NJ2xJ7lXK9uG+W9^pf_BdKUOY)puEU(m1vCDI>Ox`q# zXBXKVv(%cBu$P`%$?^^n>`YL-0AJ#;$|%3bLxPkR}?Q7cuxuRGRr0I_z6u@BTjd#W5Bx`6HN&g#YACkydygBLbCVAqA~+(2!u!UWI;dmOGJMKc zcxbI*xwGEJWiR6tKC^|`m||a*(;d;!!9T-~;8y?D&BMpD$Vp0sXmXJK&f%o{y7=*y ztDli=vgyHL-|Xu{YC9*E^XLa({UMSa=#5P5!{#|0jJ%fjDSgJ+SZg?WRHN0nZ&rA> zLAG^KXy5=}I$QX=_s5?+vaYOH_7LM99}6`T4Rle`^hm`cMxb`JDvnblxge`V2{G3d zQOyxO^J1dZ-@#vA3tfKF??V&ci^~NFK;;vP0Kj$bY?MridG@&Pt;-+%6>sXb7znl* zZDbP*FnKFmZ*A;YD}q@F2aJD}fgYK@Ka7qj^%#)=pq*|{3e&g0Tpt0we@hUmCtaWh zbYB!XuEv(cY&*Xh^FA%lKM>An4>1&hC9@{M{>-T}OP_@za?vkFkFJ6(3Em|dfKxF{ zmZA{io(QkxD2nR(cwK6llI^ndA$OFvjWrCMHHtW!Bd%l3; zqg%Qzt#C0&s_|w0Q~7vri{5G#tvxA?z>nIf9F`!>?g9AFW>!PD%rvRb15aI%JGrACGq<8ibAQ5gD^c3 z>?{k@`Po#9^w&!cGLQLKoYQ#r;`Qi`8M z*xsf-?(1qKEbNojtXz!>MsX?l0gHg!P8c~pu#P_$RXqmR`QV@B_^+l6qzNkxSss(4 zbquzZpzmEe0b+9Uy^5us-@a*LJm;4LVlt+*Zd!D%Q>r^&tgx+vD@q9{wTWCIT?&4a zJ7Cu^UWU{gSJ&}K>bPEUqYaz8oG?lVsO&J#>-E`N(#c(>6uYi{t4Dh5c+PbO+<_(f zFg5NW9sNtSjx3j|c;&voOt#05j$NjC({w^VoQQ$`yefZD<5q*mt_qU8iOQa{VCoFl zF=&scv+oN&kfPLuhJ=7MjtI6g*vg`s189l}ORCFQb!qqSTITGR3gbn$&&0kF7`^hE zHp=RL`Z7Ao>keK+w$how4^`w8_0mA;`a3s;``GEwkl&z%lV$Ww`}GN%b}@MIscqrK zmltAXsEd(&rL2k%pZ}F#X3ou;`9gl(shMJH7;LOg-%l3}de!DXbdG3j^SQCQQlT2> zo$;&-JyPJBz2R07(TA!n4NK0x>EhqT7QFa0tNRbfiuHV>c)+d-fV{kP=#LGp-R=tx zS&Nh3BZAbVO5NtKSwqBK$6b#TXfE9Pr>8;;(x=^5w;t&7zDTks*Pc2OVN$afw0F^$ zE9g%f5{@!Rm z+ars-0I2;vDm1QV(nE6czQ%R!uoU~0xnfV{>(4_OwEPb0?}}_%1Q?{rZjrfH{GVt0 zR`%2l0eXiESAPc~v2~XrAwt}vdd>*mEB+fg8nc`xUu|ZNcsH_^b~nD*Qu8$YpZMkH zgrwJq=vS{5fB(5Lsg1^cWXjRfwDIs}VXEB-oNnF7if-NTAbCp>jOPvxv!;!-Ma~64w50E&>2Y+yO2#|o z2UF8*QVR^wtqx>*rvd8s2P(ndVdXW9@mDy&`_1 z3VpzR!l<1}^y*jZ10@4VHjap$A*pCIB{|+6msR$!f#_Vt7(_<8tU;)&H+vfx_j58f zSZ6!V%TpWms<)0)ZpUz@PYaSKf?hdKD+Icf+h(1&v9(q@*XnYeFdbC>0D96lrBh*D zsaKBzZunKnyU;kBrA<^jY{^ci!e`HQy4lRBNFPM)`AZKAJEBvy#Mth=KABl z6tA^lPxYn5NN)>epc1_xru)OP)_&twNY04-zSSDdHr}CIElg%y7Y*Lfc&eR?| zaxH{W9o^DwM`!ayP3_xh&v1H-N=s?dg!NX4+5S~OlA_R+VHx`OGjVP#@y844M-U#h zix%;9vWnAFTYb#Ypx$cCpf?|2i)G35VG^2+7-(Z|&0Cf|`5@~&$);-8~~VJ_Lb20s_YH!Okdek3MTH?qgHokqLajz73u zb80DO`>mC~EYR&(nhVML0d;nQ){V6#UP_UEPhkT9`im6znmMJm0mr91)}HlBF!PXK zYue$ORI!HuAgAu_qRL}YMpTGkVLGepiNq#;khy=^d_c@8E5Fi9@@ksz%q|D-pF>j2ulr@b0=Q%y(=k?_v@sgAG?5EkKo5C?5@6?a05Ye&wL(mq+_OKA zS49(dKT!9hRPs8y(s}tfPBOvE2QN(>+1o83-157fKYvPVOddAM9vJAQoV;EHfW)5( zp%*PR04+zBybO^M{VLC8@0e-OoNRh;N$a68<%v=zt8~uXGsy(RjWe2lb)!NfU*NL| zGE?>>qlzP=O=&CLp$ET&?ZrldZ-!DgY2k}}Zd#I7j=7ue;hQN1BL{RI&66C+jdBNG zUtiKO22EuOpO$5TAEP9Rll=UnCY_%nVvM67MU~B1a$3`0{Z!8Sf#D;L(URp1^0O23 zSyi0!MZb$-LVtvvtroo&c!%!l?Heq&t1tZ&oxT_!>F7`kd~I+%sKp)}N;=%}q6!%~R$*XTK=yX+&lQL-Wn!-F zo40eA4VB*Wot*|WADj3?>gKwOc6g&NSCw{O=Y)2|dj&A4h^)cXX|#g=g1(iOetRfg zo+s7WwfQOpTug$=(c4kq(@iLSfL)*iQAUz`v~>L(6HM!9D$O<*Q;SG1qrYD^<@`EN zz{g119J%rGurF`4ZYbyXrLliczB%1R-oa)TNk5$orNaUj)!OK3ZWUv3|AGyX@g?vQody$Yt9Ssd|qk=`5I(z1)J%n*plMZAazA7 zU`XAzwdsRNNQF7~V7zmP0v~9ZfGm20X$QT#J#3QwR3DS4I8#eSUyos-JKWxZf&s0b z>%iXkTYZ6-!&jU=$kTN9fddK)b_4}Fa< zDB;!djxRSSPSHG0QPvx$x%T;^#JUUNFhIZ!gt3?x!svkfU4y$*WQL9u`aw)b*?~UG 
z#37q~S_*#7pArB~`T^Q^euZ9Qh-*&v>PRTMD+Fm!4Oi2Kiv_xOMS86v*x$L+ z3L1llrt{d^5+u%z4y0GdRVNk*0T@Z4vMn$lk3M<;SHJk)*stlD2u{azZh0@*N7-x84=~)PoHfkOs&>TUVn2KI3>AKA8=D=H7(KDbFY^_QLO5lqR^f{8~7_Kurkic>bOlSXqi`OjIHePHAv=? z7S`WbJ-N(ukwT7k;pQ5smIfP|KjCHAIn(#V9!Iz_Bti4WDi8Wu2iQfLDPDP^ALQ;hehcQ#C!FC{Xn+heo;(Jn2_p+>U3S!MBo6mcE@uXLZfy zK0zk??S~D@S^~HC{B)U9lLtw5>_B_Zh|RlTka|uFCHHlw{>rH+Gp5Ja_Sr3s{VNK_ z$I?VK^`@h>H&&DX0@1iAj(gy}3i1<#b8=AZfr}jx)#=q))p-S}feR9pNf|9?^tMCW z8=1yesL#~{kH=VP-aI!Dd`)XvbJ=!U0Sp<+h=0+8v1mj+uNy!9!68Ppv(-hJ+bj`rPAv6N1>PxZ#WGChf~bW4vHtKAyx6>J>6m zNIvenyJ;UPRjXu5ytbP@u$X#|EK1hC8kV3pfwhN290EMscq7zF{DlqEMxzDI+x zPDjAb>ZmJTwSrHhmmIUW4f1ZlhC}yK=bAVJ2!m72dZQ*OTAyyx3TMK zGd2_;G~~a}X**DOWh3ThVrWQGs<`b~gR31JL* z?3T)E)S`|~Bwc)f325RVT`ZbW;CS)UUSX((`0Duw>r6GM9bu`*+`%0yog6 zkAR@Rfqn**0MYQT!H<)gltK(L0hzwUI?dmlqAM3LLF_DmpOBl}LeP-7U0=NIXF?1y zJb~!@IN;cVGp?Ov*V%HLa2I#>%kF=EOFWb=LOXuvGgLG3BUNBFinA}u&b%->{jNNB zq2N6e&iD|7P7J;0VccsI8f!YHi@%@iFIJ$#K$$24kbnEF{rf|ln0TD~kB0{?S#M7L z``7LO=d$HD+P`afI^Nfuy7&nY-e3E_`DecT^L#Y52>>~J{Uz|uMS%svFDv6D*o3;*#(fn1P=7WCKeUwI&Wvd8{U{h6N$8nU0*DZFg*{_lVm z84%qkuhUO%e_HY+B-DS?WW#`POVs(iu+h+DCLYTJs!4Tr?3@0eIZ}1^wnW-zK{Q2k!18%!jdhq2|if%RUPOF zD=JVh!CMS1ie*-0dn~wx`hId=O=b3#Nz0FBt5LdQZ;PK>Mu$yhIhBq)D1C3vf|kg2 z0BU;LgyAoM@%*~g)uPxRGoS>pf6^Y)zjf%#}MW)+ z%o*OOp{@Z%L`RDy-pRNu`KB6ePK&G5Wi&qIge^KdwrLafr|IRP&(E+-8tS)8_mz2%s57JKt4o`)M-&K%=3VzIbe&n zwr(~slc&s`K|v^{OiHuKCABsgo2mjq5x4AG@a<65u%`%b?APpa0pU)=df6c;+|qH7 zaN+l3FIMB*@X!&Rq`Qeq&9`z#luM(`1<)^Y9q54xzIw6~fF$eb({Ve(fq!lqW1s)d zM?oql-e#~}J`U`|zCu=yxnx|@Ne`@c=hE$$6S?qnfcd{;4zlj1z%PqxrRx#zbEp+H zE%ITi`Nih^6UZermxK4iXoDGM2xk%djpgGLPK1QUO%P~ zao#SrukQ0v?`xA|mVVUb=%UeP-&tp(Yc9vFl{+*>JM(Xe~-Hlis18DP`qF=_qecs;HWIbr(~$YBTaJYO!x|J@OMwRk7EbZ(eoC zn?w6He+J4Vd+&=0aZzihfzbomGD3R3ElNsY7WJI)Sj1VS_!y7Rw#zMQ^YMVf)F`4B zweq7IDZ}Rsiue0aCUZiAW46Pd_xO99UAb%gExi^Foq_%OfNQ~I^ZGa3l%M7)=TGK7 zJzHY~o2O{rlz%>nFbgb9z#G&#j#zOekp20@{ZZO!dye(8epZ39UPtf0r7@;YXec+xOZ5xoHhJuT?=FEaN4IY#XaU0Mw16=Pv& zlLDRl$rloB)TiFu#AvshYx6LZJUrSIAoKc?1BD?c_BK#8_FR?OAd4BfQ;fcHV`UI7 zb$b9sG$hG{hID779uONkLHoBGCM;IPEf0g8-kd&p)PvyL$u~C*oT78lfd8bmn_p-h zd!cI}Oo!1U`at2@>i*E*`KEqCSMY;U1FdJTKGa3YFwscTre+%w&)+#)sWLu(-Kfz$ zt@gs(t?Nrwcg?J6XdWLZn*rIQ?MDw+dp(_y_s5hMM*C0`@!)5=dt2^nmmOV!PFyA>?M@OP6V`bf=BS-u+{3(%~;i_x1fQB4%495}gcnYzv}hE zQEt})Cztu*WgP!M`P)DT?oiQEBU)xnhlQn@HR6JR6PjC0h&E+X@bvU zuSahqy;n{$AK1ki=z-WouUm;#555|#xXv4?D&>s&l7>l#zj{!QtF#^>q7&cVXXrL# z)X0*&p(=;B$w!4qsBcwv{)q~YHiBti9Kz>zvmM=?a?oX zS?!}LNjs3i$SwOLz*nyafGJXcL}NRP`Mx@51_6Q#>P zjK}ld2vsvB`bka9CyVEr8~24|+mEW-%W=f*&ebT1cVYe_;kgGV^q~PIlk7_);squs z6w&QTEyWgE*xXnPNoknb9Dx@0TZgI&f)Iu+(&^9U7R68uI1^0Z3ULDNz8rmpmXA4w z(7G00Bmo<3_wcb$E@m#{H}d1hlOczvtObLSK&XyoNk0Y*glt< z>?wq+A$pWUD6(=i%2jGHt8p@`HP~w{j1oHfP7rR+;JR$_A6&&UxUS}%4dcEK}R%yffB0q~fG8iAcW+yf8Qd zoAznt6+lGJ;Og{8sVX+ujl@MHTNM($^2x2C9@q7jix0GAymarLnFA1T#+6lofP?-B z6o;o7mn-}4_{XH0_IuxG05SF%RuTgl>+dF{F}|8^Az!21y8~XgWQv%sO^Wma^f!kw z(ga;znS0+#=jhGAd|+v$kBFC|fDx%>a_EZz2yb({z$waEOWUhGQ~14Y9@+5jZt3Q& z>@E?N?BV@%86m8utf61bGw{TeWy2o#hMMfp(9c8VTRS+6bskDB2jy>haxC~xR{AN$ zRjEED$K4AS3xkX7RNkk&OAXRg<-QuTR({ofvIzZQ-2B=4hf31HEojK{7mx8e6xw$q zk+L-zra>u(Bi-8Nq^7pB%P1L?uiGZJ!=uHpr33zblX*Ta7&Z69HU!ysB;`T#zs?Q#q@A`OqJ1ud(Dj`7A9z_TYhZXa0))*HPDt38MlprXaDC_Rb zVV9Oxj|u=A>M5s%crwxg)0$b%&q)BVroR ztxpZB_<-E*7DbpIBDt(=$%8UX9gxdoZSC^RmU)&}5#YC5B6g9?SV8_TFugai2jg0@L8$%cMD~Z#{h%jD zwSO(!F19q!e)X~c`73#AH$wtEn$cWR8`^A`Tsl6fqd5k+W3Yvu!hF3NYOZBSRfDoO zh=c*WXTYJy>2HXw#~u*{ST3Vd0)x}zOqe-?Miv!~(H-3jt%}g8mLedkG`cCN?Af`mCbNC0A`mkzwR@sT(*l zd}B%@2RRo6XltE)dnc=XFut;Yw>-W0RkiKY7UZDI6J#kVOE6RHD)n7uiDs!DeD@u! 
zq*RJP6pnsh0NXa5$ZnZuT-`R%Exjlp?(6c*gNizQ4Fy$s2LNq-L?L2=GVEKz6 z4$oiM-RBDEueZUSHo_t7%h!cuSdmebAwVVho4w!1vU zEG(c*?AeP7V%}?(v%wNB@pIaf{<%;#L6%RfJL26nduZjN$34H#9sd|7b4KY;yM4z?Qflzd(& z;NU~dU3%c&kWE!j&-1)77Q_7r-(Kvyzga)G(CId4Ei!WEgYOM3v%?+vA5tIy)1kTJ z@%dxTsXHG}kgUU%;71-vt9%H@WtO&6))A9{ckWFAnOkX{{)I;k?A;P)CJg*8q6jU0 zH=^4)w}j({Iqx^8w2CbG(XI>vg~N;Q?*LwQG7Wk-Je8J=Wd=Io*@MM1=?C2jFKKD$~dot%^4c{5Dd!XK(-3?smcab9%;C~ z3c&|9kzPLmFm()FP&%XNm8zY3{jbC&Pm2gTEm=Rmj@47sMKXGfezKJH%+}xW}G$^62+L8K8I?FD!_{U@~`>TRQzvCGssuM7vYX{nE(R^&{ z=R@qAKIA~=RO?HCJVFFqS5S)S&oQA8?1uNe3Iu=rg+U*?;f@GV+P(qtZ(5qS zh!!!lpB><#r*r^DDk}BPecu|KnP+apnlr+E)q+%#tgWsYU`|YO!OyM(dz|>rkj$f) zE8^*6b%A&N+w35ad75PbQ12&m>k~hc-jKY24RP<+?fwO3XwlNpY!ydsI-0ETBgTaI zh?*ZQtwKnj_;V{K>Dg7^Xrhvhg*2DdpLGszsL@sCI`wyp2H)%p)3-85*DReaOQrFu zbPAQ>(T%(7(e?=Z28hHNE&f;nze&vYe52DBxY6Bp7b(Hdjox-*ODNiP+&=rbw;lgd z>gFb21juo^3Dk8f)l$`sg*lIuO5SO>_L?4w3P(GJ7deL+6NW-VqF1Iq&F~vJnsJhn z7-GFX6PlF)9C9IKLr8ZbqQ!HXdem8lnu7% z7NPyB7;AofSKDutK9CeNPx`FjjFP;}k0ARmEKHDyMyM1N43 zYr#3OLGcEwe`p+heMWqidO_-TIa9FQkLbfu&rfLwJGKM0T+e$YCR6AWPf>M^Uf-F< zO%)+NROaBVB11#$!dv&Z%hrA>-pTO^d)nD6tQB2DmBsYE8Ui?dkn$1)%x1+oGMtcd zKcSY;z3Ji`{FCI%V!`C%3<%OPGx`$s@_KI!$}+Lc<2qv6)DW9@tfK@gLvD1XOWNq> zy00~Il6K=oQW52j`|o_b;m2H~M}Azv#$8VP@A7(_vf8({xKS!fR|~b$x}U|g2j6pcKv=cRlfUMq5AVKDEb^}Uua zC$-&mk>ys_2XjSqe69oFq4JRvFtzy&N#FWN_krrEc0VferAaEwh0^@!bH4A0`FG`G ztAuGRTNcsH^`d2;2Q*3(&1KNRxeh>cptf_s zg!~KDx2#zle}4TNNC3k5@$Ct!bi%07oX{Bm#RR;G`U34CMM9)4zD4q-w0-Dz$Qn^C zy>en2^adbL&-{x+Q2yA<@+Ic7PO7xtDv&Ae`yn7HOramxRq*DeZ_Ur6`FxzVu8ZaY zh{)Or`ui^g{w4rHN(c4*1DP(PNZS~dYu;l!J`7vN3-`FD0JIpnC?04pMML-h6tMq) zm1eLOUcci!j<7k{rFR92oKzpR!o{4%0woTMPHNahff}~|Up|4!%m5`i46sXy|Cm$J z$mRlt>+K1E%L70%*Bb;b(s6%q&cY`xoq0U|Hy3*X+y93A0YDD0B$Q9QbJ;kgz?r61 zgAD(73L;Zm_9gUe!09QU{>@X@Otu!*hs=N36=Q~!N^hDV6k6(d6WiV4gCK6L-@9Rrs^tx|AHBr)m+xu$P-AzZjhrY zTxaK6gJI$)xM#RpA52x24Z+gK*7YViWs>|Klt-f1HLr(@lG!9z-RNTy@_?Fm%*QJK zxpVH=7Or!o(Ca8xoZhB1B*z1K-10XL`>#;|ULmvP_FFF?@x{6l@4~U$M&(xKTceJK1~ab8w*&2>b77?3YAZh(0%g{kcthTVnwJKf&PTEE5)%J*=!%DBxi}S3b*LeTb_cUz37@@KCT-8`_j-DRsi4wu{S&IiM)!ZVy#D56K1EPjlT4?bT|WJo9YOXTaE_Zu01;yF7tMq*~4P>$n+ih`VCD z9_Y*l-KIqM``jD&g@5r3{ZxN}sB8bxkht3^Opw#a$qf8|e1;m7lEk#1B_ zysvS}u{pG*)Ht3RMfsPY;gV96PkO;5&s`dwT zH}?q5<>*iea^N6sz6lNCFANW=*6WrIFH79GcuzM<5uA&zU;HyC0hU-rvEqTD*exa_8h z40F8_wi?+|(>#2+Bu~9ag2}v--V>1wFI|9R+M%dxiS6zfA!}M!n=bn5fOA2>eLWK* z=Fa$sZ|@%@UjX>4qB|2x_vzGszS1kU(^lg(BDOr||NOmA67Vw_S&Gl4Z$zu}@$8$M zwYBsKFe>w;@uN)t#A^Hg!AM;l}@A{;G0&D|1i$8x1e=nW}1L55?u?50I`=| z=UYePMy2^aSD)LnIe-ETRbAlj%a?!+1fmU)s22D;hKs13zATR>{>L@~YL1-8S#8HE z>BO1oMifbd(puE(3G(YSzg~QypijbT^CMr}QdwFI--Li(ZuhN)3c){BO2f=K(b_)f zVBAe<`9(}hUARjp&^X7?Qpqha75Sj#hUF9U*1}+_BN~FwFV}w2D-YD-B zmYyDC#ZP;NfqM5Q#w zTm`0w61|F@Mm<$b6AU`B`uvk#XgljjXs?|y*|X5 zGr&Mjh<^PafT{x8zsvH6Vg@W>>4dJH{7s-mZp~eVQ%wj5d;>0SgQVOMozj_YbkXeE z5noqVquJfi;I|9jOM6$pG61SD`hOVl$CJ(V@`;g5AiuZe8YkL3K#y}7UJHm#>VB)v zzI;v&Ggm>Ujcg7^JNuH+L$f4{<6zMT7Ry@=+Xvq3=V@HTPY%7`T8~Zf3opLgn|Usr z6s3=z$Sr|cfB!`U^)k{pWLflV`D@_03PxbJL_`)PuDg$Leb7|cD}UH$fq%>TjfN(R z132`4CaB4JxsN}F2hR7(kjszqe#rqx^xe4&i-U{!2N-SX73t|=^{YF9(~mstOV?4y&S`16L7Vs_$4b< zH*x&dMqC#h4zE1ZHLKvs>-w`<*2*^6839&@@<37gdsRqA}nT^o1C17yCv z%w@SmN}ee zyi+t_!)N31%IGbrZ%4ny9($vXAm%7Oo<@rF5)%feiN_{xIaH7=$#`jsj>jD0Cvqf0 zHKxGvL9?IJ+b5cYUx3(Y{uekp?TLs=y+|i9-DJPFV@I-nxepDtsOSO!$Qm7=EW26Q zP<7VCKU|mN-Jh_wSHy{Vm_Ly6S#0!7RohV1pEoxFw;q32pVrt6pKp>;t_#$EU|CAIJh&Q^k7^XtT*by z?)WXB=EBbXKYzX;>6irQt=I6}O^oa#48wRqZ^|`7wi~FcuGVGJk;yN2W&KPUm%k$3 zM_>et!F?T5+v3F=K2l>@YN5iT)?*%(4#?h>EciTf!16=kKfoX`AMu5d>h^j8Y%J4j9=*qCFL~u7|)#%_FBz-#dXAl`*ZtuW^`=iZfoS-5jg>^yqkk{O@)O| 
zzoJkY_nBk7WceoY@HX(T;79+-gTD@a5gVX@Lp{o_FP=OyMN9Hb5R1udMJo;s+iqm+ zxN&jM5YFoKu6+{^U&C6b>$aEBU~Ftn)B8tbd4EnHyAUMPan-8 zr@w$qPVPxO?)?awjj=qObgr;xKzp^?oX|7e>yRP|RQS4I{as;Ut>HW(%NVuDEYD5r z`(PtnZGZm-XNd2iE)_MFIcO$nkeoASbj&FHh}V%rZ~KkYW9TvDy{FBg*A^0B@t=$Ft~su;QJfnb8zI=Q&TC)3S*g+MBNrXNC{2zl0qcwjkIChF z(9--;2GYoXLcsK?%%Vq`*MrQhZ&ehne$YVXwbIXyogxMCJQtJ-rC+%5<5A{Db5@LP@1 zJbFwA+o`J^bKDknFyfxCeCyd$gkce}vZ{m#b{1NV8pR*gcU@~N;ICLQ(P`&#P7Rrm z`EJ%Ohw{?QH0!Mm5iz2oZQz)NltSm2wW`-`Rp>Max^bIa%|xyf-7xL2)Z$}#bdq7& z*0vgSm!n_^wN^}qtZd_?jY30msM$ki7|A6*L7%>k9n(#PyT;oAlVHxq9~j5|{h7fb zUj;e83WcTW#+!D&01w>r$YV4uqG;c@w=<+CWOjQy?DPnHY9ilna)>tsRT5VRV32s@ zQdmw^L>$1Xu&~ZCbFVA`vA)l!f&6H!W+hC8xcRUo;WEWsKlGDD- zT48+*Y>?*+>oA*TVbdw4SvlF5-_p{Jswz!}3(UJS3qkdmq z^U{ImY!@bE?q}2? z(yGb8c0fcf&cUE^5{BG+j5?U=+v9zBa3U|&TscSc?K2=B{98h(+)5k7@u>$AtJT@` zQaazMG&yn&w}szEIsT!>%@(JBY>kK2DBVHq0U1D@%+0cb=xn z`ZrIrefEb?6BT(am|L#6gT9)oOUlF8pp;+HN(6+iXxf6g%i#;1^Xq;gBkks(%fKj$^fW7QS& zssshrzmT$-uX;rM5daaGYsqo<-PV!4us(K5GSPkrbmQQFk^kmEC9&06 zpv)0m@HCfv-@4SdTFPL}KUdJSuA{%uGU~?e&#_fYpZdi<3?E&!oLvC!!SU!}uUrbO z7{0pJXhM4RFpJ7zR6?*51h^e@R`C@dndEUYn<5*-W1TnOZ}ed|tnTuwjCkATAiu7B zkNmfImDw6P0 z{9)n8Pk4|r+Yrt%;k`;O0AM@)UzpxCNN7m@UE($8HpYi`JRmVzqDQU#AI@&UgX=me zreY3niwGG|LP)mEbb!DFeUY6uvIsj!73BkUEiKcyHDr|#@Js*DM^U)2{p#bcr-{XF ztUQvuOv|>lF_O1y$3#G{ftE=@MThp579)!UIge?sEXaiwm|P)x`HW_X+pW0Kh+(X@ z17QU?X*saV3InA*<`>kaUp;-u);=ygA?+6}q7a>-88ILOp4+PYz%gVlzCU44{pqVQ zP8Oap@R)82SPiZofo+j93H!efsZZyggvhLaIprmUjUSVM8be=PoYbMjsDr%P`wL@C z>nled0qPG0EJC7Vic+6luY#a-65+MXOte2(zT9cA(2=enc6HouJYaxMTg3~y5}ohk z1O_Q-G7*ZflBrK=G2!wb+%cmV!HV`EzKe2kFG|M|UOz9^aT`2j*TdU7Cs(B&n)$Jq zn3Ncu&lDe+IJxA-fZE#ia+sNv4g8dgCOfUjCu zQ&9`G_@qi%^st{CnA&<+Mq1=F@s6cC%r3Y- zx%vLdqL#M~9%0Y&V4%b7rJm)2$mo0RiWtmUMLY?xE5Czdu+Qhcm(Zf9Dz7rv+-7WK zp1d>rZVP|BXC$B^qBOVQqnCKQKiy%%Sn2>TC*@ni1-rn8-_ekr1y1YlHzI&G0G+1@ zWlx7F>Fm1GQ^7#T1v!f}LkR*Kcc;E^F*=tk5x^UM;k-M_QV%Bxw9j7xdF6kbF41=S z-lR4nDnx9s^UcJAhTASAc9eWoH%Phtd^|L!48u6b5MVPRzT2WW2k0;t8W@kJLLF&W zPT0NOcX#(gXg78Vb(pkYRrV$f)1)%hqiwMZLhJn^Y_X&p7e+*~5Mf=;6-61T=Ok2; ze4}@4W{2--4C*fu)O@^hb2kt1lX6!r@Gp*q62{C&xYpm4JFCWM6e{q5uGZP2IXR;~ zS2@gWvpk{t+6_^}Nn7290(c6B4;C!-m;2A$$%q}bszZ3Lqm&gq0MjIly|7hJO7j!C zrv`%!u}R^NlAhXvPtC8!>!BN+2$kU!l*%~#%&;{)y<9b|s901x$Sh&zbuePkqbejV z>4`u{vp$}YYyFOHjK=ZSutldpBiCogD)1arz-kO`uJ@uk#qmPtmBUqrjM*6I^c2?Q zyX#8QGyRoVTU=tQR{|8wyB~hsq2(Wo4JH^m+%O`>6GJ+4PSIqs{VOcoW4c(N1~;+U z7GxMB)f@}$1T}?rG`c_hez|6Nk^xb)?K75R6?ZPiQ|(rKWXm%V!__VS57k1iLzKQo zxAoR6oaxtw1Zv2}{)B!rv2{TD@culhn+#k)G!Gh1ImJA~aC|}G2#;4|4SvWFiCPc* zAWaTK)waE5PuIhJ5*nr+*ayNrWH>vnfKw=|EBX5dKN1C?UW_lUu@XkhO0GWAJoQ!w%5CUbn_2srA^Es`(EblCw6cpbF}GF=70LrH-{pew=i z5af=014sVS;59fpU>ciw8;CI-ru|OqpW*6zGr;Lf>TFOaOn2*FHc4D zvKuqL$esHXfZg-p+8kb#>3wpxyT@O!;l1a>puv&McdAJpnQuATSoe-X*AAqUMx|KB zT2Hzn8Y@7FaQ69+BmS+ES@kP!EyCsnXOckA+^3iaM=DE2ZJ?7bpf^_W52g=aB{xg$ zBjw@o-XlLleb(E?S<^_eR6H1>)`CACgWvd-CT)>V{iiPh`7ZQZE9$}tlw1DQ^^&2L zt5`E^CRmRIn0A35{%^%kaK>yzK05-lU+O%U&g<^gnptWu$@od-YCU>?`h>N=a2j9G zsvF*BSAY?82$?^2nni=Y6QZ`-_nQDo?hPeqqvF47y zL9xTCA7h_%uXL|`Ug=rsUFln~2i}19pC0{86CHOv>D_SQMgZ5g?)2Fog@K0&&pB74!q^nObef*h%a_JJ}Yq(FSwBMT42(Z49LvNCb z4(CFj1%mr$W9|pzxoGB}#U|j9D4rS1?7(JjmDjC#8(W&JA(!L!xwqA!3oC+8-_7A< z6CUCk2UG(O>AzImpttD`>f7ku%u|6`1RX{H3dQ>x8E?0-o})pH?W{RXk|}?8JHMR* z1}QVw4Cx=;Hm*>iN}kxyif+kOj(t38kpg&_#~B>&2VQ+|{W9LpP}D#VcVT?v2C=8 zXbl#}8u7&>9ZQc-7!6TEo=Nil;Mm*1l52j>IZ@62aaO@Ajl4os*}n6&UB#9f!1UgQ z4o+;HQVYLdYoiY&Kje_0He9y-X$(-^O8lfq?H23Q z=uEyw|)D=pczW3mO$(jx33}%TE(}hHTSGKt|ymaApP<-Zwsku?yeS)xUJf#jY2dem%vj{^!PC z*7JI2Abg_gL`3<5K2>SU6cWm?+kKidp86jYWCnL5E4tdnWCi)^EZIsb_BGhwmE+<# 
zDw?zDt$Jsvs#)NSqov`3sAR$%H$l)_S|Z$Es|H%xc0g9IxItXtv*zveq{6S$kRF2x z*NIA1Qx#2(S6;VvEl=2|Oq(!Tc&>miXpA;=zxKiXGATifC``7{3%82YG+0NwWIn=F zw!=a!-v9caXA-zrfcTL3=+7R8^Cj%ZpKu4W2emB1@_(%J{Y?%Pr@ePp#`8TZT zXB*@C1RHrYmtVXcjQ97SGS-UaCd{i|YdJ4FI|e;8@Rg%5z90KWB#h7Ftq>gZbXfh* z*wD{?=3F}CXMk5KD``F}7Zr&Gu=gI>TX z&8>x0<<6>fhqiD8DyXMWiQ95DQAw0slHDA+DeP9k#UZ_)*QI21$B1kpt#1CjJ^Ydm z$K%rgkM+u?zFHH$r`hj7icnwnR6AMVlg7t^lmA!ZfrI^m2)Y?!@WQKWLW)K%U~u4g z%+g1qS!~HCamg?c0KkLs-u!OFLt^$YFY}AB zQF!;$Mnl17?d{FY*pP~8-|k0{k#*CrA6peB6YMRP2O#+oga9CruAXvSP}g|noMeC( zQ#b3Eklx*9YiWi|d0gN5I;aH^zm)DL9K0skX>d-cfp|d3tejO9_Uqe+-~^`8*UIb; zCE4XW#mpF{YY}Fze^yyq+~-(^$Q(+Lj*nOR?YD5&Ve9|YErBQGMEhR!iSGk$iv%3QXK>=}nR}p@V9MiZ{utRR= zKj<6_#7!kmN542-bNw~}aw$PGhtJQPg59NxZpq#ETKd-Rf~-N(r2X3j3~ zHA4ZI@)N-dP#J*B=0w;%Y>(m_Ow5=Vn=M7zx1R`TSWa4d)SH@=5&fcrit=A@GFh5o zH`xt6iyEgPK_sw58RO>ugw7HY$#-z=uok!*++wDI^o$clEkeF9Babhm_8N-!&f9=o zl4hingq(Wf$Yc2A9Bj2)t(&JRt@xE!IlJQZ9HPLqF#;9+9i5ibEHwox%-NWJGM+ zUZ44FdGeCs`9Xt3h`I?*TDtn;jXm`Aeg=p+zec^u)84Te zV_oh*MUnXqJMS*=l4wXWmhG9&kkT2;ykIL~?W)l#$wMQnyf*5BuX<`))L~jhLxLiA z+UtaSS;FFWZu~p1JK{^&h){)8&>R#6RW^5cP~`RyZMNDzJpVv-fpB~D;^(p#(m|00 z0rS!6*1D2`R`4s^=gyv3^4RpW9btN!^l$huF!4HK5if0$f~DZ;N>NRTIk{hRuJeMy zqC(h@2ovIOTiJ`sW>DD~fGD=(Kzskob>$2FH7C<2C2?deVSkJ3o(SH5;PjxH+{3io z_;ca-{~Mn_?g&aw^_Hm2)Bn=$qrosJ+lBvP?LS-pW<8AvhjGl+->bQK@Jvu9luUR`kKctMXO(i-Gl-!u$vGvHw)Y@q zn?{ba>-epZCUHWgL6wn0vYdb%K4-PFZ1n8UjAt2f8L8j0wl9E`?T^5P{&#}F@%;=V zez2zT=wac{OcCsiO}<-yLwx-P?k818t1Fl%Tu-=n{0Ru^_R1PM!gPb@=eizv7j*@X z9__VfcqSX6OtOi@R9Mtpy;BWhC5u3WQ!Z|9;n6mSFj4zp3qvgU-bf~?lOW{#Z|h-` z`shyogE#DyI|uyt9_L-<>xcXN?mkbFIq88PLul>UEKwy~A^e%a!Sr-LsF441{7IR6 z*Lm^69%Zn<|8$jNSnt4k!RVo1X4KaV_Q5i776SO77e78IRBxS0@28@cQRh0Q^Stzg z00&j!`NX3i7I9YRO8LzMRvZ#eb}+chhg}k#mdc#oqng>M4+eK%hVXp9{t;shVN6@S z>drpf{vs0~AsWZ5{00UZAsASxzvNb?d*B^^q1})l6*wPTsmDE_L*v zP-aN5It!w*N3G1&e(U)fqgsxOW?;ZU5Mmp@u={nv6P1{D0~xU!4&yd-N77KsL~OU+ zHD&yI4Y&YqywE9veTx^HdIHXa^jb+zt_EY^8`||YK^!ngO>a;(FW23axdch_e#7^Zq!%-imN%E1QJbDo z@3a!o=Nypcz%c~zHOFl6?&n!v(ID{#4%OCXkJuew0CA@gF|rzatVj6 zmxL2QNnGbdZW_WUD2LE14k#r)X&%2He!XDD#4tm`i(nnpFXN~}S$J)mRM2*!j$dm$ z%l!5{xj6E(6G2mNKR-b@;RMO-D=YQXIOndIKvrmjs$P+^t%1809)OD^(k+yKT-lqV zo6ic0<8E1wH`d=Rzpda??P|^MoY0?mhD3SqR0+!Vs>@1s259GdK}EpnET6>XUU2FX z1it5^bx$^&{4({Aw6IZQKHL{G`~&!-V&dz#&ZD8;xnNL9wPGgmUigdI(pUw^-XbMQ<5x+71M#^+4e z=UPOSYwDC!;%cgiTc9B3z?0)%i};`&CTCKae<>-1$--Qd?O*#8!EtEe!rK;>wQoq6 z8VRnu8Ws}@80xAOccn{pkOXWVX!IdFAdi>CF6U!Pt}l@mUHVeXM<+BV`7f4_AP`oG zUwt_cfYaQOo1Lu{RN^iLvg@tQ=}@zzw=8L+6OzyNT3OiEk>~?{y|Q4S$8v zN1TfaCYE+?L@k(#<`H)4RC?E(y$;&wsMdRqJ62Dm^UjaRs zD$7}aJoa|w&LcxB|5``BLrf6q9~@@iAqIyDzn6`Lzv|&C-*F9E2&g2UGZTbg3&Iqu zk63tCwEm$`8xq4{GbO7?VL;L|0$SBumOE5&m<6%Orz@U}zCX87hlIisDi`E|)`)3< z!wY@3(zcX$>!3RIoZV)3Zooz}%?Q*Xf(dm{No@EFdZD!_UvsU?Ut4F8=8dbY{ETI8 zC~<%5{I0yKFkf~q!w4y{UP!^2xt534Tg;`UkL7v_8CGPL%{}wYW*$+7E2ApNaw7aP z{IPhvWb7;{N)t-ix4HO<{70IFH@{+2V*%vjr2Ap4-WdOvAYDhJ&&+KzGB@Yj`Pt1m zoi!NK5hJ&Fe+Vt4C= zLuQ;}K9MXFY|&@|{3~%(TT$+HW%|mc>ScOf~tIHKK< z+Ed&pTe1jo)j`=N1mjQp>fdn%24)p$M?bir_6I3{n0TynNGNdxvw*s*U6Wk&To1HKn&n3sZ4$D4k89z)O|nGA zCd}-+ec|jlqHiOf$HfPGiIH%Q?7nyo^4&RbgxTZnn{KKcC-gd9Y$Qy0A$FD02 z_qx}kPx+T#&7;!3@vL6~&zi2Z{hS4ZZQn!Ild>&z8KvwCt3M4#xO|rV3}kPqDv}*JzGQ;ao~* z3FOKxhYk|@a}B0_thID~;zfyC9(OXA#K{5W+EGz6L0093xLXQ6Bpr9fv-KmC%Sx9E zV8`CTSi8z_)V-6EX07oj4qNs6o{4I15&-zEd?1Z*Uw9-~(uC|zh8sH3PXgsx({t`9 zJ&m16@*(*kAJ%6GKYGQT-Jb!ux+@Xbdd%1%ItAr%wFc3;O2<*}W8I-MvEKY^u&}Op z#B|N{Yp>YtwI>AHhoQR*ihk*$aflGjjH%wium%o)%KEoZqGMr`=y=7l<(#nI_&4NLhx2IlR^H&oV#Pv&P zry6P4Ux!&t%d5`w?VaW0KSQeGNZ!>6e+pNxlma18zC#3-A=p1Jvd&`jeoFGdwe<3kTiY<+2F&!hCg1MUHeAdPV!M8#I+^ 
zQ!D1h6jCx3sma_$EFqBWP9euGZ?zxVp4MDXRSuR7c&#Msecm|-k5Rznu&ovAk0{j) zJCT&#&6I>GY{5pBUW#@?`{F|)SqmX>ACQr7h*pM#;9#?BZ9_M$)2}+s+eFDzDT5;| z3g}rGMq?+;w#bS?3f06r!q`K4B`fnfbvT;3D~#kE@kg)%Ub~9Q_R~K=!oD zuWrbV({5-ErJ%1J^m5+ZQdny5^*VY`0=Q(R=^v(c1ARtoniW6$wYG=WXltqkSl5E2^ z#fPiUEdx^AEqsXF3(eCWmRY3UEw=Ak0#f90a+pkCkF%#nXlXr0O1d+NZ+Ra!-G|Sz z=5|G0fNEs${4a~-$|f=@1+_TK0su$_&pd+qwHe)O;5H;LE0qa@j|4F9!Z-mqJ)>;?9;0RFzS#ZyUscwP$xTI7m{)x6L--nO%i3HH9~` zFF4U%dHc|Xt0jbbfdUvej8qfS`WP&zJdl-`_7Z(#tNtaL9!0IEp*Qeehm@9dfzbt5 zfjC4M9HlFr4g*gPrr!?!RTu(y^3>(4V;PAB;Su5h*_Pj5!t+FSUuDqBzU;)orgi^^ zv#y+L_^k7L7eb+m1`S7J9$`pd_}5h!VV3m zg%k=ZddYzh_Job05|nr%-v>ve&u|i?=Sw0r&S=~w^5VK6IXpMtIWfDihm;Tk(pmkox>r7xVaH~fVd^U z7D70(xmbzW0-i~+z~1!Au@5H(OAV(F7b}~MDx}t3DX-iSP&i(wen5M#7?L2)KUeo# zkOgWQK1jPilikL8vc8gh!Jf6kr%gnA*`G_qNH#sDl#En2^o_M%aHE}+C(Iqz)=?Ow zCF?nJWW~EVYYX;kl-qDRz(a7bH4k&HDY+FrfAaFi899Oj{kcC^Aj!j&->3bdyak@S z4nG#>J@W^0CxwfoS&a14KHs?RbTx1Uh#=p)#?1$F%u=t(6z7n zo7bKxRo@ptNv20ju~ep?iQ-fu_Y`Y{4%7<%rl~d#p31(&ji~?9$wU0f$pfT9^aJ)Kx&a7tlSz@06XHSS^ z5*4gwwRPTMBhNeFodJJ@G1-Oe_kyaF3Bo(<4CEW5r-TJ{@Ptv#3dDt>mv$x8O6&Fa zjPWSOXc96){th`!eS_QK=Z#?y3!Q6?Z0C%eQCH!rRbS<$m-fJ)yhvI85aYGbjES^l z6Cg=xfjf+TN<&~}T1~eD2d$SY_-3S~Llo=C`m#kzYd@DKBH+9bUmewFhL`v5blcJy zkR5Ej)*?fy8=(({KVHKRKM2HaX1Ji1L-v>olPZ~xDz5l+sk%(1IQ=AF z8?Rft&WEZM-GMk$c++nt`23a123H?X7v8%XmAzNCV%TTZ%gYs5AMp%hUp@@cMhy_C z>g!_@J+sc-6iPD>@$K}an^kI6Bdu>V0`7uCWBKmKolZ0at9s+^e@GcU>zBTe#^o+g zu=fzzt*9))AXi?mo;r4dIxAO(o?o>#cA^KtAV%*NCQu(2_K&Ifk|-OSke+uTt+=(V zb)4xJJ`NaVPRXUFlJ3o`t)Kko!sA=XS&8$L4>96P4wc*=Gm&b3y?0i8^I&LitNU=v zd%4d3r8DNo}aVxo-7b2Jq{31q8|sX^ZTQ@@+e|ixR8eWjDyc z+e*1Q6x`wx+!N*T+sMsJp#Ws30q3(PT4zG7gY?xUSRyB(C#QLw9rF`m+qsK=uV z7;222Rv>l$SDss`VEPvJWBT^nN9JaFtzep(T3I1xzpjyidZ<#P zm%$7E4pVL>sXMFMqd+%^59gv>WV!4f5ujf=!MiEdCzPJWh^WtOHCx&z<5yTm#MzKX z<-^uTWAOOeIF7TfZrWGe!}RQ01m3aFq)530hCp(MINFc7?}U@G*Yk*_p#hqswOI5H z)%`g0qHRpYM{5G;%uBZ6VC~M2x(s1J^v4DnQN=(Qn9MubeaZr? zjp@Hi?1QhugsU* zp^+nRIgZDPK;syf*KF_ZF~%F2ev_u}hCFA!r}#jQ_dz3L-KT%O4uA{x$IrhX2cPq= x*Gp?gjMMkmgFpZGAO80&{f`(LlLwb~nQ58nZ=XEIIx$A4`gI-EqHETF{vX|qbZGzp literal 0 HcmV?d00001 From 823a2ea22ef3e6c65acd8328df9d501d9f8af8e4 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Wed, 30 Oct 2024 10:06:10 +0000 Subject: [PATCH 36/42] chore(cli): drop 'notification' prefix for configuring email auth (#15270) Closes https://github.com/coder/coder/issues/14644 --- cli/testdata/coder_server_--help.golden | 72 ++++++- cli/testdata/server-config.yaml.golden | 51 ++++- coderd/notifications/dispatch/smtp.go | 4 +- codersdk/deployment.go | 178 +++++++++++++++++- codersdk/deployment_test.go | 3 + docs/admin/monitoring/notifications/index.md | 56 +++--- docs/reference/cli/server.md | 145 +++++++++++++- .../cli/testdata/coder_server_--help.golden | 72 ++++++- 8 files changed, 536 insertions(+), 45 deletions(-) diff --git a/cli/testdata/coder_server_--help.golden b/cli/testdata/coder_server_--help.golden index d5c26d98115cb..cd647d0537a93 100644 --- a/cli/testdata/coder_server_--help.golden +++ b/cli/testdata/coder_server_--help.golden @@ -106,6 +106,58 @@ Use a YAML configuration file when your server launch become unwieldy. Write out the current server config as YAML to stdout. +EMAIL OPTIONS: +Configure how emails are sent. + + --email-force-tls bool, $CODER_EMAIL_FORCE_TLS (default: false) + Force a TLS connection to the configured SMTP smarthost. + + --email-from string, $CODER_EMAIL_FROM + The sender's address to use. + + --email-hello string, $CODER_EMAIL_HELLO (default: localhost) + The hostname identifying the SMTP server. 
+ + --email-smarthost host:port, $CODER_EMAIL_SMARTHOST (default: localhost:587) + The intermediary SMTP host through which emails are sent. + +EMAIL / EMAIL AUTHENTICATION OPTIONS: +Configure SMTP authentication options. + + --email-auth-identity string, $CODER_EMAIL_AUTH_IDENTITY + Identity to use with PLAIN authentication. + + --email-auth-password string, $CODER_EMAIL_AUTH_PASSWORD + Password to use with PLAIN/LOGIN authentication. + + --email-auth-password-file string, $CODER_EMAIL_AUTH_PASSWORD_FILE + File from which to load password for use with PLAIN/LOGIN + authentication. + + --email-auth-username string, $CODER_EMAIL_AUTH_USERNAME + Username to use with PLAIN/LOGIN authentication. + +EMAIL / EMAIL TLS OPTIONS: +Configure TLS for your SMTP server target. + + --email-tls-ca-cert-file string, $CODER_EMAIL_TLS_CACERTFILE + CA certificate file to use. + + --email-tls-cert-file string, $CODER_EMAIL_TLS_CERTFILE + Certificate file to use. + + --email-tls-cert-key-file string, $CODER_EMAIL_TLS_CERTKEYFILE + Certificate key file to use. + + --email-tls-server-name string, $CODER_EMAIL_TLS_SERVERNAME + Server name to verify against the target certificate. + + --email-tls-skip-verify bool, $CODER_EMAIL_TLS_SKIPVERIFY + Skip verification of the target server's certificate (insecure). + + --email-tls-starttls bool, $CODER_EMAIL_TLS_STARTTLS + Enable STARTTLS to upgrade insecure SMTP connections using TLS. + INTROSPECTION / HEALTH CHECK OPTIONS: --health-check-refresh duration, $CODER_HEALTH_CHECK_REFRESH (default: 10m0s) Refresh interval for healthchecks. @@ -349,54 +401,68 @@ Configure how notifications are processed and delivered. NOTIFICATIONS / EMAIL OPTIONS: Configure how email notifications are sent. - --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS (default: false) + --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS Force a TLS connection to the configured SMTP smarthost. + DEPRECATED: Use --email-force-tls instead. --notifications-email-from string, $CODER_NOTIFICATIONS_EMAIL_FROM The sender's address to use. + DEPRECATED: Use --email-from instead. - --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO (default: localhost) + --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO The hostname identifying the SMTP server. + DEPRECATED: Use --email-hello instead. - --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST (default: localhost:587) + --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST The intermediary SMTP host through which emails are sent. + DEPRECATED: Use --email-smarthost instead. NOTIFICATIONS / EMAIL / EMAIL AUTHENTICATION OPTIONS: Configure SMTP authentication options. --notifications-email-auth-identity string, $CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY Identity to use with PLAIN authentication. + DEPRECATED: Use --email-auth-identity instead. --notifications-email-auth-password string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD Password to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password instead. --notifications-email-auth-password-file string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE File from which to load password for use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password-file instead. --notifications-email-auth-username string, $CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME Username to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-username instead. 
NOTIFICATIONS / EMAIL / EMAIL TLS OPTIONS: Configure TLS for your SMTP server target. --notifications-email-tls-ca-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CACERTFILE CA certificate file to use. + DEPRECATED: Use --email-tls-ca-cert-file instead. --notifications-email-tls-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE Certificate file to use. + DEPRECATED: Use --email-tls-cert-file instead. --notifications-email-tls-cert-key-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE Certificate key file to use. + DEPRECATED: Use --email-tls-cert-key-file instead. --notifications-email-tls-server-name string, $CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME Server name to verify against the target certificate. + DEPRECATED: Use --email-tls-server-name instead. --notifications-email-tls-skip-verify bool, $CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY Skip verification of the target server's certificate (insecure). + DEPRECATED: Use --email-tls-skip-verify instead. --notifications-email-tls-starttls bool, $CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS Enable STARTTLS to upgrade insecure SMTP connections using TLS. + DEPRECATED: Use --email-tls-starttls instead. NOTIFICATIONS / WEBHOOK OPTIONS: --notifications-webhook-endpoint url, $CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index 95486a26344b8..38b2b68c24de1 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -518,6 +518,51 @@ userQuietHoursSchedule: # compatibility reasons, this will be removed in a future release. # (default: false, type: bool) allowWorkspaceRenames: false +# Configure how emails are sent. +email: + # The sender's address to use. + # (default: , type: string) + from: "" + # The intermediary SMTP host through which emails are sent. + # (default: localhost:587, type: host:port) + smarthost: localhost:587 + # The hostname identifying the SMTP server. + # (default: localhost, type: string) + hello: localhost + # Force a TLS connection to the configured SMTP smarthost. + # (default: false, type: bool) + forceTLS: false + # Configure SMTP authentication options. + emailAuth: + # Identity to use with PLAIN authentication. + # (default: , type: string) + identity: "" + # Username to use with PLAIN/LOGIN authentication. + # (default: , type: string) + username: "" + # File from which to load password for use with PLAIN/LOGIN authentication. + # (default: , type: string) + passwordFile: "" + # Configure TLS for your SMTP server target. + emailTLS: + # Enable STARTTLS to upgrade insecure SMTP connections using TLS. + # (default: , type: bool) + startTLS: false + # Server name to verify against the target certificate. + # (default: , type: string) + serverName: "" + # Skip verification of the target server's certificate (insecure). + # (default: , type: bool) + insecureSkipVerify: false + # CA certificate file to use. + # (default: , type: string) + caCertFile: "" + # Certificate file to use. + # (default: , type: string) + certFile: "" + # Certificate key file to use. + # (default: , type: string) + certKeyFile: "" # Configure how notifications are processed and delivered. notifications: # Which delivery method to use (available options: 'smtp', 'webhook'). @@ -532,13 +577,13 @@ notifications: # (default: , type: string) from: "" # The intermediary SMTP host through which emails are sent. 
- # (default: localhost:587, type: host:port) + # (default: , type: host:port) smarthost: localhost:587 # The hostname identifying the SMTP server. - # (default: localhost, type: string) + # (default: , type: string) hello: localhost # Force a TLS connection to the configured SMTP smarthost. - # (default: false, type: bool) + # (default: , type: bool) forceTLS: false # Configure SMTP authentication options. emailAuth: diff --git a/coderd/notifications/dispatch/smtp.go b/coderd/notifications/dispatch/smtp.go index e18aeaef88b81..dfb628b62eb86 100644 --- a/coderd/notifications/dispatch/smtp.go +++ b/coderd/notifications/dispatch/smtp.go @@ -453,7 +453,7 @@ func (s *SMTPHandler) auth(ctx context.Context, mechs string) (sasl.Client, erro continue } if password == "" { - errs = multierror.Append(errs, xerrors.New("cannot use PLAIN auth, password not defined (see CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD)")) + errs = multierror.Append(errs, xerrors.New("cannot use PLAIN auth, password not defined (see CODER_EMAIL_AUTH_PASSWORD)")) continue } @@ -475,7 +475,7 @@ func (s *SMTPHandler) auth(ctx context.Context, mechs string) (sasl.Client, erro continue } if password == "" { - errs = multierror.Append(errs, xerrors.New("cannot use LOGIN auth, password not defined (see CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD)")) + errs = multierror.Append(errs, xerrors.New("cannot use LOGIN auth, password not defined (see CODER_EMAIL_AUTH_PASSWORD)")) continue } diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 6a5f7c52ac8f5..3ba09bd38d1a4 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -926,6 +926,23 @@ when required by your organization's security policy.`, Name: "Config", Description: `Use a YAML configuration file when your server launch become unwieldy.`, } + deploymentGroupEmail = serpent.Group{ + Name: "Email", + Description: "Configure how emails are sent.", + YAML: "email", + } + deploymentGroupEmailAuth = serpent.Group{ + Name: "Email Authentication", + Parent: &deploymentGroupEmail, + Description: "Configure SMTP authentication options.", + YAML: "emailAuth", + } + deploymentGroupEmailTLS = serpent.Group{ + Name: "Email TLS", + Parent: &deploymentGroupEmail, + Description: "Configure TLS for your SMTP server target.", + YAML: "emailTLS", + } deploymentGroupNotifications = serpent.Group{ Name: "Notifications", YAML: "notifications", @@ -997,6 +1014,135 @@ when required by your organization's security policy.`, Group: &deploymentGroupIntrospectionLogging, YAML: "filter", } + emailFrom := serpent.Option{ + Name: "Email: From Address", + Description: "The sender's address to use.", + Flag: "email-from", + Env: "CODER_EMAIL_FROM", + Value: &c.Notifications.SMTP.From, + Group: &deploymentGroupEmail, + YAML: "from", + } + emailSmarthost := serpent.Option{ + Name: "Email: Smarthost", + Description: "The intermediary SMTP host through which emails are sent.", + Flag: "email-smarthost", + Env: "CODER_EMAIL_SMARTHOST", + Default: "localhost:587", // To pass validation. 
+ Value: &c.Notifications.SMTP.Smarthost, + Group: &deploymentGroupEmail, + YAML: "smarthost", + } + emailHello := serpent.Option{ + Name: "Email: Hello", + Description: "The hostname identifying the SMTP server.", + Flag: "email-hello", + Env: "CODER_EMAIL_HELLO", + Default: "localhost", + Value: &c.Notifications.SMTP.Hello, + Group: &deploymentGroupEmail, + YAML: "hello", + } + emailForceTLS := serpent.Option{ + Name: "Email: Force TLS", + Description: "Force a TLS connection to the configured SMTP smarthost.", + Flag: "email-force-tls", + Env: "CODER_EMAIL_FORCE_TLS", + Default: "false", + Value: &c.Notifications.SMTP.ForceTLS, + Group: &deploymentGroupEmail, + YAML: "forceTLS", + } + emailAuthIdentity := serpent.Option{ + Name: "Email Auth: Identity", + Description: "Identity to use with PLAIN authentication.", + Flag: "email-auth-identity", + Env: "CODER_EMAIL_AUTH_IDENTITY", + Value: &c.Notifications.SMTP.Auth.Identity, + Group: &deploymentGroupEmailAuth, + YAML: "identity", + } + emailAuthUsername := serpent.Option{ + Name: "Email Auth: Username", + Description: "Username to use with PLAIN/LOGIN authentication.", + Flag: "email-auth-username", + Env: "CODER_EMAIL_AUTH_USERNAME", + Value: &c.Notifications.SMTP.Auth.Username, + Group: &deploymentGroupEmailAuth, + YAML: "username", + } + emailAuthPassword := serpent.Option{ + Name: "Email Auth: Password", + Description: "Password to use with PLAIN/LOGIN authentication.", + Flag: "email-auth-password", + Env: "CODER_EMAIL_AUTH_PASSWORD", + Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"), + Value: &c.Notifications.SMTP.Auth.Password, + Group: &deploymentGroupEmailAuth, + } + emailAuthPasswordFile := serpent.Option{ + Name: "Email Auth: Password File", + Description: "File from which to load password for use with PLAIN/LOGIN authentication.", + Flag: "email-auth-password-file", + Env: "CODER_EMAIL_AUTH_PASSWORD_FILE", + Value: &c.Notifications.SMTP.Auth.PasswordFile, + Group: &deploymentGroupEmailAuth, + YAML: "passwordFile", + } + emailTLSStartTLS := serpent.Option{ + Name: "Email TLS: StartTLS", + Description: "Enable STARTTLS to upgrade insecure SMTP connections using TLS.", + Flag: "email-tls-starttls", + Env: "CODER_EMAIL_TLS_STARTTLS", + Value: &c.Notifications.SMTP.TLS.StartTLS, + Group: &deploymentGroupEmailTLS, + YAML: "startTLS", + } + emailTLSServerName := serpent.Option{ + Name: "Email TLS: Server Name", + Description: "Server name to verify against the target certificate.", + Flag: "email-tls-server-name", + Env: "CODER_EMAIL_TLS_SERVERNAME", + Value: &c.Notifications.SMTP.TLS.ServerName, + Group: &deploymentGroupEmailTLS, + YAML: "serverName", + } + emailTLSSkipCertVerify := serpent.Option{ + Name: "Email TLS: Skip Certificate Verification (Insecure)", + Description: "Skip verification of the target server's certificate (insecure).", + Flag: "email-tls-skip-verify", + Env: "CODER_EMAIL_TLS_SKIPVERIFY", + Value: &c.Notifications.SMTP.TLS.InsecureSkipVerify, + Group: &deploymentGroupEmailTLS, + YAML: "insecureSkipVerify", + } + emailTLSCertAuthorityFile := serpent.Option{ + Name: "Email TLS: Certificate Authority File", + Description: "CA certificate file to use.", + Flag: "email-tls-ca-cert-file", + Env: "CODER_EMAIL_TLS_CACERTFILE", + Value: &c.Notifications.SMTP.TLS.CAFile, + Group: &deploymentGroupEmailTLS, + YAML: "caCertFile", + } + emailTLSCertFile := serpent.Option{ + Name: "Email TLS: Certificate File", + Description: "Certificate file to use.", + Flag: "email-tls-cert-file", + Env: 
"CODER_EMAIL_TLS_CERTFILE", + Value: &c.Notifications.SMTP.TLS.CertFile, + Group: &deploymentGroupEmailTLS, + YAML: "certFile", + } + emailTLSCertKeyFile := serpent.Option{ + Name: "Email TLS: Certificate Key File", + Description: "Certificate key file to use.", + Flag: "email-tls-cert-key-file", + Env: "CODER_EMAIL_TLS_CERTKEYFILE", + Value: &c.Notifications.SMTP.TLS.KeyFile, + Group: &deploymentGroupEmailTLS, + YAML: "certKeyFile", + } opts := serpent.OptionSet{ { Name: "Access URL", @@ -2432,6 +2578,21 @@ Write out the current server config as YAML to stdout.`, YAML: "thresholdDatabase", Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"), }, + // Email options + emailFrom, + emailSmarthost, + emailHello, + emailForceTLS, + emailAuthIdentity, + emailAuthUsername, + emailAuthPassword, + emailAuthPasswordFile, + emailTLSStartTLS, + emailTLSServerName, + emailTLSSkipCertVerify, + emailTLSCertAuthorityFile, + emailTLSCertFile, + emailTLSCertKeyFile, // Notifications Options { Name: "Notifications: Method", @@ -2462,36 +2623,37 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.From, Group: &deploymentGroupNotificationsEmail, YAML: "from", + UseInstead: serpent.OptionSet{emailFrom}, }, { Name: "Notifications: Email: Smarthost", Description: "The intermediary SMTP host through which emails are sent.", Flag: "notifications-email-smarthost", Env: "CODER_NOTIFICATIONS_EMAIL_SMARTHOST", - Default: "localhost:587", // To pass validation. Value: &c.Notifications.SMTP.Smarthost, Group: &deploymentGroupNotificationsEmail, YAML: "smarthost", + UseInstead: serpent.OptionSet{emailSmarthost}, }, { Name: "Notifications: Email: Hello", Description: "The hostname identifying the SMTP server.", Flag: "notifications-email-hello", Env: "CODER_NOTIFICATIONS_EMAIL_HELLO", - Default: "localhost", Value: &c.Notifications.SMTP.Hello, Group: &deploymentGroupNotificationsEmail, YAML: "hello", + UseInstead: serpent.OptionSet{emailHello}, }, { Name: "Notifications: Email: Force TLS", Description: "Force a TLS connection to the configured SMTP smarthost.", Flag: "notifications-email-force-tls", Env: "CODER_NOTIFICATIONS_EMAIL_FORCE_TLS", - Default: "false", Value: &c.Notifications.SMTP.ForceTLS, Group: &deploymentGroupNotificationsEmail, YAML: "forceTLS", + UseInstead: serpent.OptionSet{emailForceTLS}, }, { Name: "Notifications: Email Auth: Identity", @@ -2501,6 +2663,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.Identity, Group: &deploymentGroupNotificationsEmailAuth, YAML: "identity", + UseInstead: serpent.OptionSet{emailAuthIdentity}, }, { Name: "Notifications: Email Auth: Username", @@ -2510,6 +2673,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.Username, Group: &deploymentGroupNotificationsEmailAuth, YAML: "username", + UseInstead: serpent.OptionSet{emailAuthUsername}, }, { Name: "Notifications: Email Auth: Password", @@ -2519,6 +2683,7 @@ Write out the current server config as YAML to stdout.`, Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"), Value: &c.Notifications.SMTP.Auth.Password, Group: &deploymentGroupNotificationsEmailAuth, + UseInstead: serpent.OptionSet{emailAuthPassword}, }, { Name: "Notifications: Email Auth: Password File", @@ -2528,6 +2693,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.Auth.PasswordFile, Group: &deploymentGroupNotificationsEmailAuth, YAML: "passwordFile", + 
UseInstead: serpent.OptionSet{emailAuthPasswordFile}, }, { Name: "Notifications: Email TLS: StartTLS", @@ -2537,6 +2703,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.StartTLS, Group: &deploymentGroupNotificationsEmailTLS, YAML: "startTLS", + UseInstead: serpent.OptionSet{emailTLSStartTLS}, }, { Name: "Notifications: Email TLS: Server Name", @@ -2546,6 +2713,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.ServerName, Group: &deploymentGroupNotificationsEmailTLS, YAML: "serverName", + UseInstead: serpent.OptionSet{emailTLSServerName}, }, { Name: "Notifications: Email TLS: Skip Certificate Verification (Insecure)", @@ -2555,6 +2723,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.InsecureSkipVerify, Group: &deploymentGroupNotificationsEmailTLS, YAML: "insecureSkipVerify", + UseInstead: serpent.OptionSet{emailTLSSkipCertVerify}, }, { Name: "Notifications: Email TLS: Certificate Authority File", @@ -2564,6 +2733,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.CAFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "caCertFile", + UseInstead: serpent.OptionSet{emailTLSCertAuthorityFile}, }, { Name: "Notifications: Email TLS: Certificate File", @@ -2573,6 +2743,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.CertFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "certFile", + UseInstead: serpent.OptionSet{emailTLSCertFile}, }, { Name: "Notifications: Email TLS: Certificate Key File", @@ -2582,6 +2753,7 @@ Write out the current server config as YAML to stdout.`, Value: &c.Notifications.SMTP.TLS.KeyFile, Group: &deploymentGroupNotificationsEmailTLS, YAML: "certKeyFile", + UseInstead: serpent.OptionSet{emailTLSCertKeyFile}, }, { Name: "Notifications: Webhook: Endpoint", diff --git a/codersdk/deployment_test.go b/codersdk/deployment_test.go index d7eca6323000c..61474a3b77ea1 100644 --- a/codersdk/deployment_test.go +++ b/codersdk/deployment_test.go @@ -78,6 +78,9 @@ func TestDeploymentValues_HighlyConfigurable(t *testing.T) { "Provisioner Daemon Pre-shared Key (PSK)": { yaml: true, }, + "Email Auth: Password": { + yaml: true, + }, "Notifications: Email Auth: Password": { yaml: true, }, diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md index a98fa0b3e8b48..eabc09438d7b9 100644 --- a/docs/admin/monitoring/notifications/index.md +++ b/docs/admin/monitoring/notifications/index.md @@ -89,34 +89,34 @@ existing one. **Server Settings:** -| Required | CLI | Env | Type | Description | Default | -| :------: | --------------------------------- | ------------------------------------- | ----------- | ----------------------------------------- | ------------- | -| ✔️ | `--notifications-email-from` | `CODER_NOTIFICATIONS_EMAIL_FROM` | `string` | The sender's address to use. | | -| ✔️ | `--notifications-email-smarthost` | `CODER_NOTIFICATIONS_EMAIL_SMARTHOST` | `host:port` | The SMTP relay to send messages through. | localhost:587 | -| ✔️ | `--notifications-email-hello` | `CODER_NOTIFICATIONS_EMAIL_HELLO` | `string` | The hostname identifying the SMTP server. 
| localhost | +| Required | CLI | Env | Type | Description | Default | +| :------: | ------------------- | ----------------------- | ----------- | ----------------------------------------- | ------------- | +| ✔️ | `--email-from` | `CODER_EMAIL_FROM` | `string` | The sender's address to use. | | +| ✔️ | `--email-smarthost` | `CODER_EMAIL_SMARTHOST` | `host:port` | The SMTP relay to send messages through. | localhost:587 | +| ✔️ | `--email-hello` | `CODER_EMAIL_HELLO` | `string` | The hostname identifying the SMTP server. | localhost | **Authentication Settings:** -| Required | CLI | Env | Type | Description | -| :------: | ------------------------------------------ | ---------------------------------------------- | -------- | ------------------------------------------------------------------------- | -| - | `--notifications-email-auth-username` | `CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME` | `string` | Username to use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-password` | `CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD` | `string` | Password to use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-password-file` | `CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE` | `string` | File from which to load password for use with PLAIN/LOGIN authentication. | -| - | `--notifications-email-auth-identity` | `CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY` | `string` | Identity to use with PLAIN authentication. | +| Required | CLI | Env | Type | Description | +| :------: | ---------------------------- | -------------------------------- | -------- | ------------------------------------------------------------------------- | +| - | `--email-auth-username` | `CODER_EMAIL_AUTH_USERNAME` | `string` | Username to use with PLAIN/LOGIN authentication. | +| - | `--email-auth-password` | `CODER_EMAIL_AUTH_PASSWORD` | `string` | Password to use with PLAIN/LOGIN authentication. | +| - | `--email-auth-password-file` | `CODER_EMAIL_AUTH_PASSWORD_FILE` | `string` | File from which to load password for use with PLAIN/LOGIN authentication. | +| - | `--email-auth-identity` | `CODER_EMAIL_AUTH_IDENTITY` | `string` | Identity to use with PLAIN authentication. | **TLS Settings:** -| Required | CLI | Env | Type | Description | Default | -| :------: | ----------------------------------------- | ------------------------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| - | `--notifications-email-force-tls` | `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` | `bool` | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. See https://datatracker.ietf.org/doc/html/rfc8314#section-3.3. | false | -| - | `--notifications-email-tls-starttls` | `CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false | -| - | `--notifications-email-tls-skip-verify` | `CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false | -| - | `--notifications-email-tls-server-name` | `CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | | -| - | `--notifications-email-tls-cert-file` | `CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. 
| | -| - | `--notifications-email-tls-cert-key-file` | `CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | | +| Required | CLI | Env | Type | Description | Default | +| :------: | --------------------------- | ----------------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | +| - | `--email-force-tls` | `CODER_EMAIL_FORCE_TLS` | `bool` | Force a TLS connection to the configured SMTP smarthost. If port 465 is used, TLS will be forced. See https://datatracker.ietf.org/doc/html/rfc8314#section-3.3. | false | +| - | `--email-tls-starttls` | `CODER_EMAIL_TLS_STARTTLS` | `bool` | Enable STARTTLS to upgrade insecure SMTP connections using TLS. Ignored if `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` is set. | false | +| - | `--email-tls-skip-verify` | `CODER_EMAIL_TLS_SKIPVERIFY` | `bool` | Skip verification of the target server's certificate (**insecure**). | false | +| - | `--email-tls-server-name` | `CODER_EMAIL_TLS_SERVERNAME` | `string` | Server name to verify against the target certificate. | | +| - | `--email-tls-cert-file` | `CODER_EMAIL_TLS_CERTFILE` | `string` | Certificate file to use. | | +| - | `--email-tls-cert-key-file` | `CODER_EMAIL_TLS_CERTKEYFILE` | `string` | Certificate key file to use. | | -**NOTE:** you _MUST_ use `CODER_NOTIFICATIONS_EMAIL_FORCE_TLS` if your smarthost -supports TLS on a port other than `465`. +**NOTE:** you _MUST_ use `CODER_EMAIL_FORCE_TLS` if your smarthost supports TLS +on a port other than `465`. ### Send emails using G-Suite @@ -126,9 +126,9 @@ After setting the required fields above: account you wish to send from 2. Set the following configuration options: ``` - CODER_NOTIFICATIONS_EMAIL_SMARTHOST=smtp.gmail.com:465 - CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME=@ - CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD="" + CODER_EMAIL_SMARTHOST=smtp.gmail.com:465 + CODER_EMAIL_AUTH_USERNAME=@ + CODER_EMAIL_AUTH_PASSWORD="" ``` See @@ -142,10 +142,10 @@ After setting the required fields above: 1. Setup an account on Microsoft 365 or outlook.com 2. Set the following configuration options: ``` - CODER_NOTIFICATIONS_EMAIL_SMARTHOST=smtp-mail.outlook.com:587 - CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS=true - CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME=@ - CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD="" + CODER_EMAIL_SMARTHOST=smtp-mail.outlook.com:587 + CODER_EMAIL_TLS_STARTTLS=true + CODER_EMAIL_AUTH_USERNAME=@ + CODER_EMAIL_AUTH_PASSWORD="" ``` See diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md index 981c2419cf903..42ef7f7418b45 100644 --- a/docs/reference/cli/server.md +++ b/docs/reference/cli/server.md @@ -1249,6 +1249,148 @@ Refresh interval for healthchecks. The threshold for the database health check. If the median latency of the database exceeds this threshold over 5 attempts, the database is considered unhealthy. The default value is 15ms. +### --email-from + +| | | +| ----------- | ------------------------------ | +| Type | string | +| Environment | $CODER_EMAIL_FROM | +| YAML | email.from | + +The sender's address to use. + +### --email-smarthost + +| | | +| ----------- | ----------------------------------- | +| Type | host:port | +| Environment | $CODER_EMAIL_SMARTHOST | +| YAML | email.smarthost | +| Default | localhost:587 | + +The intermediary SMTP host through which emails are sent. 
+ +### --email-hello + +| | | +| ----------- | ------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_HELLO | +| YAML | email.hello | +| Default | localhost | + +The hostname identifying the SMTP server. + +### --email-force-tls + +| | | +| ----------- | ----------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_FORCE_TLS | +| YAML | email.forceTLS | +| Default | false | + +Force a TLS connection to the configured SMTP smarthost. + +### --email-auth-identity + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_IDENTITY | +| YAML | email.emailAuth.identity | + +Identity to use with PLAIN authentication. + +### --email-auth-username + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_USERNAME | +| YAML | email.emailAuth.username | + +Username to use with PLAIN/LOGIN authentication. + +### --email-auth-password + +| | | +| ----------- | --------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_PASSWORD | + +Password to use with PLAIN/LOGIN authentication. + +### --email-auth-password-file + +| | | +| ----------- | -------------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_AUTH_PASSWORD_FILE | +| YAML | email.emailAuth.passwordFile | + +File from which to load password for use with PLAIN/LOGIN authentication. + +### --email-tls-starttls + +| | | +| ----------- | -------------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_TLS_STARTTLS | +| YAML | email.emailTLS.startTLS | + +Enable STARTTLS to upgrade insecure SMTP connections using TLS. + +### --email-tls-server-name + +| | | +| ----------- | ---------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_SERVERNAME | +| YAML | email.emailTLS.serverName | + +Server name to verify against the target certificate. + +### --email-tls-skip-verify + +| | | +| ----------- | ---------------------------------------------- | +| Type | bool | +| Environment | $CODER_EMAIL_TLS_SKIPVERIFY | +| YAML | email.emailTLS.insecureSkipVerify | + +Skip verification of the target server's certificate (insecure). + +### --email-tls-ca-cert-file + +| | | +| ----------- | ---------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CACERTFILE | +| YAML | email.emailTLS.caCertFile | + +CA certificate file to use. + +### --email-tls-cert-file + +| | | +| ----------- | -------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CERTFILE | +| YAML | email.emailTLS.certFile | + +Certificate file to use. + +### --email-tls-cert-key-file + +| | | +| ----------- | ----------------------------------------- | +| Type | string | +| Environment | $CODER_EMAIL_TLS_CERTKEYFILE | +| YAML | email.emailTLS.certKeyFile | + +Certificate key file to use. + ### --notifications-method | | | @@ -1288,7 +1430,6 @@ The sender's address to use. | Type | host:port | | Environment | $CODER_NOTIFICATIONS_EMAIL_SMARTHOST | | YAML | notifications.email.smarthost | -| Default | localhost:587 | The intermediary SMTP host through which emails are sent. @@ -1299,7 +1440,6 @@ The intermediary SMTP host through which emails are sent. | Type | string | | Environment | $CODER_NOTIFICATIONS_EMAIL_HELLO | | YAML | notifications.email.hello | -| Default | localhost | The hostname identifying the SMTP server. 
@@ -1310,7 +1450,6 @@ The hostname identifying the SMTP server. | Type | bool | | Environment | $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS | | YAML | notifications.email.forceTLS | -| Default | false | Force a TLS connection to the configured SMTP smarthost. diff --git a/enterprise/cli/testdata/coder_server_--help.golden b/enterprise/cli/testdata/coder_server_--help.golden index b637a0da3f74d..a6398586fa972 100644 --- a/enterprise/cli/testdata/coder_server_--help.golden +++ b/enterprise/cli/testdata/coder_server_--help.golden @@ -107,6 +107,58 @@ Use a YAML configuration file when your server launch become unwieldy. Write out the current server config as YAML to stdout. +EMAIL OPTIONS: +Configure how emails are sent. + + --email-force-tls bool, $CODER_EMAIL_FORCE_TLS (default: false) + Force a TLS connection to the configured SMTP smarthost. + + --email-from string, $CODER_EMAIL_FROM + The sender's address to use. + + --email-hello string, $CODER_EMAIL_HELLO (default: localhost) + The hostname identifying the SMTP server. + + --email-smarthost host:port, $CODER_EMAIL_SMARTHOST (default: localhost:587) + The intermediary SMTP host through which emails are sent. + +EMAIL / EMAIL AUTHENTICATION OPTIONS: +Configure SMTP authentication options. + + --email-auth-identity string, $CODER_EMAIL_AUTH_IDENTITY + Identity to use with PLAIN authentication. + + --email-auth-password string, $CODER_EMAIL_AUTH_PASSWORD + Password to use with PLAIN/LOGIN authentication. + + --email-auth-password-file string, $CODER_EMAIL_AUTH_PASSWORD_FILE + File from which to load password for use with PLAIN/LOGIN + authentication. + + --email-auth-username string, $CODER_EMAIL_AUTH_USERNAME + Username to use with PLAIN/LOGIN authentication. + +EMAIL / EMAIL TLS OPTIONS: +Configure TLS for your SMTP server target. + + --email-tls-ca-cert-file string, $CODER_EMAIL_TLS_CACERTFILE + CA certificate file to use. + + --email-tls-cert-file string, $CODER_EMAIL_TLS_CERTFILE + Certificate file to use. + + --email-tls-cert-key-file string, $CODER_EMAIL_TLS_CERTKEYFILE + Certificate key file to use. + + --email-tls-server-name string, $CODER_EMAIL_TLS_SERVERNAME + Server name to verify against the target certificate. + + --email-tls-skip-verify bool, $CODER_EMAIL_TLS_SKIPVERIFY + Skip verification of the target server's certificate (insecure). + + --email-tls-starttls bool, $CODER_EMAIL_TLS_STARTTLS + Enable STARTTLS to upgrade insecure SMTP connections using TLS. + INTROSPECTION / HEALTH CHECK OPTIONS: --health-check-refresh duration, $CODER_HEALTH_CHECK_REFRESH (default: 10m0s) Refresh interval for healthchecks. @@ -350,54 +402,68 @@ Configure how notifications are processed and delivered. NOTIFICATIONS / EMAIL OPTIONS: Configure how email notifications are sent. - --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS (default: false) + --notifications-email-force-tls bool, $CODER_NOTIFICATIONS_EMAIL_FORCE_TLS Force a TLS connection to the configured SMTP smarthost. + DEPRECATED: Use --email-force-tls instead. --notifications-email-from string, $CODER_NOTIFICATIONS_EMAIL_FROM The sender's address to use. + DEPRECATED: Use --email-from instead. - --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO (default: localhost) + --notifications-email-hello string, $CODER_NOTIFICATIONS_EMAIL_HELLO The hostname identifying the SMTP server. + DEPRECATED: Use --email-hello instead. 
- --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST (default: localhost:587) + --notifications-email-smarthost host:port, $CODER_NOTIFICATIONS_EMAIL_SMARTHOST The intermediary SMTP host through which emails are sent. + DEPRECATED: Use --email-smarthost instead. NOTIFICATIONS / EMAIL / EMAIL AUTHENTICATION OPTIONS: Configure SMTP authentication options. --notifications-email-auth-identity string, $CODER_NOTIFICATIONS_EMAIL_AUTH_IDENTITY Identity to use with PLAIN authentication. + DEPRECATED: Use --email-auth-identity instead. --notifications-email-auth-password string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD Password to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password instead. --notifications-email-auth-password-file string, $CODER_NOTIFICATIONS_EMAIL_AUTH_PASSWORD_FILE File from which to load password for use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-password-file instead. --notifications-email-auth-username string, $CODER_NOTIFICATIONS_EMAIL_AUTH_USERNAME Username to use with PLAIN/LOGIN authentication. + DEPRECATED: Use --email-auth-username instead. NOTIFICATIONS / EMAIL / EMAIL TLS OPTIONS: Configure TLS for your SMTP server target. --notifications-email-tls-ca-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CACERTFILE CA certificate file to use. + DEPRECATED: Use --email-tls-ca-cert-file instead. --notifications-email-tls-cert-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTFILE Certificate file to use. + DEPRECATED: Use --email-tls-cert-file instead. --notifications-email-tls-cert-key-file string, $CODER_NOTIFICATIONS_EMAIL_TLS_CERTKEYFILE Certificate key file to use. + DEPRECATED: Use --email-tls-cert-key-file instead. --notifications-email-tls-server-name string, $CODER_NOTIFICATIONS_EMAIL_TLS_SERVERNAME Server name to verify against the target certificate. + DEPRECATED: Use --email-tls-server-name instead. --notifications-email-tls-skip-verify bool, $CODER_NOTIFICATIONS_EMAIL_TLS_SKIPVERIFY Skip verification of the target server's certificate (insecure). + DEPRECATED: Use --email-tls-skip-verify instead. --notifications-email-tls-starttls bool, $CODER_NOTIFICATIONS_EMAIL_TLS_STARTTLS Enable STARTTLS to upgrade insecure SMTP connections using TLS. + DEPRECATED: Use --email-tls-starttls instead. 
NOTIFICATIONS / WEBHOOK OPTIONS: --notifications-webhook-endpoint url, $CODER_NOTIFICATIONS_WEBHOOK_ENDPOINT From df6afd3354082a74ae6d36b8498fe127c4e332f8 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Wed, 30 Oct 2024 03:49:28 -0700 Subject: [PATCH 37/42] fix(install.sh): fix installation script for remote hosts (#15288) --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 257576ae4d57a..40753f2f9973c 100755 --- a/install.sh +++ b/install.sh @@ -363,7 +363,7 @@ main() { if [ "${RSH_ARGS-}" ]; then RSH="${RSH-ssh}" echoh "Installing remotely with $RSH $RSH_ARGS" - curl -fsSL https://coder.dev/install.sh | prefix "$RSH_ARGS" "$RSH" "$RSH_ARGS" sh -s -- "$ALL_FLAGS" + curl -fsSL https://coder.com/install.sh | prefix "$RSH_ARGS" "$RSH" "$RSH_ARGS" sh -s -- "$ALL_FLAGS" return fi From afacb07140a68f10f9507591340484b80235a038 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Wed, 30 Oct 2024 04:17:42 -0700 Subject: [PATCH 38/42] chore: tighten GitHub workflow permissions (#15282) --- .github/workflows/docker-base.yaml | 9 +++++---- .github/workflows/nightly-gauntlet.yaml | 4 ++++ .github/workflows/pr-cleanup.yaml | 6 +++--- .github/workflows/pr-deploy.yaml | 7 +++++-- .github/workflows/release-validation.yaml | 3 +++ .github/workflows/release.yaml | 14 ++++++++------ .github/workflows/stale.yaml | 15 +++++++++++++-- 7 files changed, 41 insertions(+), 17 deletions(-) diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index 8053b12780855..c0a3e87c5fe98 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -22,10 +22,6 @@ on: permissions: contents: read - # Necessary to push docker images to ghcr.io. - packages: write - # Necessary for depot.dev authentication. - id-token: write # Avoid running multiple jobs for the same commit. concurrency: @@ -33,6 +29,11 @@ concurrency: jobs: build: + permissions: + # Necessary for depot.dev authentication. + id-token: write + # Necessary to push docker images to ghcr.io. + packages: write runs-on: ubuntu-latest if: github.repository_owner == 'coder' steps: diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index 99ce3f62618a7..2b2887a13934e 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -6,6 +6,10 @@ on: # Every day at midnight - cron: "0 0 * * *" workflow_dispatch: + +permissions: + contents: read + jobs: go-race: # While GitHub's toaster runners are likelier to flake, we want consistency diff --git a/.github/workflows/pr-cleanup.yaml b/.github/workflows/pr-cleanup.yaml index ebcf097c0ef6b..f5cee03a4c6c4 100644 --- a/.github/workflows/pr-cleanup.yaml +++ b/.github/workflows/pr-cleanup.yaml @@ -8,12 +8,12 @@ on: description: "PR number" required: true -permissions: - packages: write - jobs: cleanup: runs-on: "ubuntu-latest" + permissions: + # Necessary to delete docker images from ghcr.io. 
+ packages: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index 6ca35c82eebeb..49e73e9b0bf63 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -30,8 +30,6 @@ env: permissions: contents: read - packages: write - pull-requests: write # needed for commenting on PRs jobs: check_pr: @@ -171,6 +169,8 @@ jobs: needs: get_info if: needs.get_info.outputs.BUILD == 'true' || github.event.inputs.deploy == 'true' runs-on: "ubuntu-latest" + permissions: + pull-requests: write # needed for commenting on PRs steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -205,6 +205,9 @@ jobs: # Run build job only if there are changes in the files that we care about or if the workflow is manually triggered with --build flag if: needs.get_info.outputs.BUILD == 'true' runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + permissions: + # Necessary to push docker images to ghcr.io. + packages: write # This concurrency only cancels build jobs if a new build is triggred. It will avoid cancelling the current deployemtn in case of docs chnages. concurrency: group: build-${{ github.workflow }}-${{ github.ref }}-${{ needs.get_info.outputs.BUILD }} diff --git a/.github/workflows/release-validation.yaml b/.github/workflows/release-validation.yaml index 405e051f78526..2f12ac2bb5e7b 100644 --- a/.github/workflows/release-validation.yaml +++ b/.github/workflows/release-validation.yaml @@ -5,6 +5,9 @@ on: tags: - "v*" +permissions: + contents: read + jobs: network-performance: runs-on: ubuntu-latest diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b2757b25181d5..74b5b7b35a1e7 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -18,12 +18,7 @@ on: default: false permissions: - # Required to publish a release - contents: write - # Necessary to push docker images to ghcr.io. - packages: write - # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) - id-token: write + contents: read concurrency: ${{ github.workflow }}-${{ github.ref }} @@ -40,6 +35,13 @@ jobs: release: name: Build and publish runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + permissions: + # Required to publish a release + contents: write + # Necessary to push docker images to ghcr.io. + packages: write + # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) + id-token: write env: # Necessary for Docker manifest DOCKER_CLI_EXPERIMENTAL: "enabled" diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index a05632d181ed3..d055c4f451e4e 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -1,16 +1,21 @@ -name: Stale Issue, Banch and Old Workflows Cleanup +name: Stale Issue, Branch and Old Workflows Cleanup on: schedule: # Every day at midnight - cron: "0 0 * * *" workflow_dispatch: + +permissions: + contents: read + jobs: issues: runs-on: ubuntu-latest permissions: + # Needed to close issues. issues: write + # Needed to close PRs. 
pull-requests: write - actions: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -86,6 +91,9 @@ jobs: branches: runs-on: ubuntu-latest + permissions: + # Needed to delete branches. + contents: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 @@ -105,6 +113,9 @@ jobs: exclude_open_pr_branches: true del_runs: runs-on: ubuntu-latest + permissions: + # Needed to delete workflow runs. + actions: write steps: - name: Harden Runner uses: step-security/harden-runner@91182cccc01eb5e619899d80e4e971d6181294a7 # v2.10.1 From 144d3f3e3d6d6c6ee6bb752e02f74ee49a4b9d55 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Wed, 30 Oct 2024 10:20:47 -0400 Subject: [PATCH 39/42] chore: record lifecycle duration metric to prometheus (#15279) `autobuild_execution_duration_seconds` keeps track of how long autobuild takes and exposes it via prometheus histogram --- cli/server.go | 2 +- coderd/autobuild/lifecycle_executor.go | 23 ++++++++++++++++++++++- coderd/coderdtest/coderdtest.go | 1 + 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/cli/server.go b/cli/server.go index b29b39b05fb4a..d0282004a2aa1 100644 --- a/cli/server.go +++ b/cli/server.go @@ -1035,7 +1035,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. autobuildTicker := time.NewTicker(vals.AutobuildPollInterval.Value()) defer autobuildTicker.Stop() autobuildExecutor := autobuild.NewExecutor( - ctx, options.Database, options.Pubsub, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) + ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) autobuildExecutor.Run() hangDetectorTicker := time.NewTicker(vals.JobHangDetectorInterval.Value()) diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index db3c1cfd3dd31..ac2930c9e32c8 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -10,6 +10,8 @@ import ( "github.com/dustin/go-humanize" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" @@ -39,6 +41,13 @@ type Executor struct { statsCh chan<- Stats // NotificationsEnqueuer handles enqueueing notifications for delivery by SMTP, webhook, etc. notificationsEnqueuer notifications.Enqueuer + reg prometheus.Registerer + + metrics executorMetrics +} + +type executorMetrics struct { + autobuildExecutionDuration prometheus.Histogram } // Stats contains information about one run of Executor. @@ -49,7 +58,8 @@ type Stats struct { } // New returns a new wsactions executor. 
-func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor { +func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor { + factory := promauto.With(reg) le := &Executor{ //nolint:gocritic // Autostart has a limited set of permissions. ctx: dbauthz.AsAutostart(ctx), @@ -61,6 +71,16 @@ func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, tss * auditor: auditor, accessControlStore: acs, notificationsEnqueuer: enqueuer, + reg: reg, + metrics: executorMetrics{ + autobuildExecutionDuration: factory.NewHistogram(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "lifecycle", + Name: "autobuild_execution_duration_seconds", + Help: "Duration of each autobuild execution.", + Buckets: prometheus.DefBuckets, + }), + }, } return le } @@ -86,6 +106,7 @@ func (e *Executor) Run() { return } stats := e.runOnce(t) + e.metrics.autobuildExecutionDuration.Observe(stats.Elapsed.Seconds()) if e.statsCh != nil { select { case <-e.ctx.Done(): diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 47d9a42319d20..e287e04b8d0cf 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -335,6 +335,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can ctx, options.Database, options.Pubsub, + prometheus.NewRegistry(), &templateScheduleStore, &auditor, accessControlStore, From 371a2e12abef4e27406481f4ef38e6eb2550da24 Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Thu, 31 Oct 2024 02:05:10 +1100 Subject: [PATCH 40/42] fix: check correct default during template push from stdin (#15293) I used the wrong default in #14643 - not sure how or why I didn't catch that.. 
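For reviewers, here is a minimal, self-contained sketch of the decision this change corrects: whether `coder templates push` should read the template archive from stdin. It assumes the `--directory` flag defaults to `.` (which the diff below relies on); `shouldReadStdin` is an illustrative helper, not the actual `templateUploadFlags` implementation:

```go
package main

import "fmt"

// shouldReadStdin mirrors the corrected check: read the archive from stdin when
// the user explicitly passes "-" as the directory, or when input is not a TTY
// and the directory flag is still at its default of ".". The bug was comparing
// against "" here, so a piped push with the default directory was never treated
// as stdin input.
func shouldReadStdin(directory string, isTTY bool) bool {
	return directory == "-" || (!isTTY && directory == ".")
}

func main() {
	fmt.Println(shouldReadStdin("-", true))      // true: explicit stdin marker
	fmt.Println(shouldReadStdin(".", false))     // true: piped input, default directory
	fmt.Println(shouldReadStdin(".", true))      // false: interactive terminal
	fmt.Println(shouldReadStdin("./tpl", false)) // false: explicit directory wins
}
```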
--- cli/templatepush.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/templatepush.go b/cli/templatepush.go index f5ff1dcb3cf85..22a77791c5f77 100644 --- a/cli/templatepush.go +++ b/cli/templatepush.go @@ -282,7 +282,7 @@ func (pf *templateUploadFlags) stdin(inv *serpent.Invocation) (out bool) { } }() // We let the directory override our isTTY check - return pf.directory == "-" || (!isTTYIn(inv) && pf.directory == "") + return pf.directory == "-" || (!isTTYIn(inv) && pf.directory == ".") } func (pf *templateUploadFlags) upload(inv *serpent.Invocation, client *codersdk.Client) (*codersdk.UploadResponse, error) { From 85ff8e026763c8a8184400701677cb1a86a20336 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Wed, 30 Oct 2024 10:07:19 -0600 Subject: [PATCH 41/42] chore: tweak e2e test timeouts (#15275) --- site/e2e/helpers.ts | 6 ++++-- site/e2e/playwright.config.ts | 8 ++------ site/e2e/tests/app.spec.ts | 4 +++- site/e2e/tests/outdatedAgent.spec.ts | 2 +- site/e2e/tests/outdatedCLI.spec.ts | 2 ++ site/e2e/tests/webTerminal.spec.ts | 2 ++ 6 files changed, 14 insertions(+), 10 deletions(-) diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index fd436fa5dad7f..c5ac7f1abde65 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -425,7 +425,9 @@ export const startAgentWithCommand = async ( ); }); - await page.getByTestId("agent-status-ready").waitFor({ state: "visible" }); + await page + .getByTestId("agent-status-ready") + .waitFor({ state: "visible", timeout: 45_000 }); return cp; }; @@ -928,7 +930,7 @@ export async function openTerminalWindow( ): Promise { // Wait for the web terminal to open in a new tab const pagePromise = context.waitForEvent("page"); - await page.getByTestId("terminal").click(); + await page.getByTestId("terminal").click({ timeout: 60_000 }); const terminal = await pagePromise; await terminal.waitForLoadState("domcontentloaded"); diff --git a/site/e2e/playwright.config.ts b/site/e2e/playwright.config.ts index 7042ebfcf5bb6..ea55bf398e7df 100644 --- a/site/e2e/playwright.config.ts +++ b/site/e2e/playwright.config.ts @@ -65,16 +65,12 @@ export default defineConfig({ testMatch: /.*\.spec\.ts/, dependencies: ["testsSetup"], use: { storageState }, - timeout: 50_000, + timeout: 30_000, }, ], reporter: [["./reporter.ts"]], use: { - // It'd be very nice to add this, but there are some tests that need - // tweaking to make it work consistently (notably, ones that wait for agent - // stats on the workspace page. The default is like 50 seconds, which is - // way too long and makes it painful to wait for test runs in CI. 
- // actionTimeout: 5000, // 5 seconds + actionTimeout: 5000, baseURL: `http://localhost:${coderPort}`, video: "retain-on-failure", ...(wsEndpoint diff --git a/site/e2e/tests/app.spec.ts b/site/e2e/tests/app.spec.ts index bf127ce9f21b7..9682fcb5751dc 100644 --- a/site/e2e/tests/app.spec.ts +++ b/site/e2e/tests/app.spec.ts @@ -13,6 +13,8 @@ import { beforeCoderTest } from "../hooks"; test.beforeEach(({ page }) => beforeCoderTest(page)); test("app", async ({ context, page }) => { + test.setTimeout(75_000); + const appContent = "Hello World"; const token = randomUUID(); const srv = http @@ -56,7 +58,7 @@ test("app", async ({ context, page }) => { // Wait for the web terminal to open in a new tab const pagePromise = context.waitForEvent("page"); - await page.getByText(appName).click(); + await page.getByText(appName).click({ timeout: 60_000 }); const app = await pagePromise; await app.waitForLoadState("domcontentloaded"); await app.getByText(appContent).isVisible(); diff --git a/site/e2e/tests/outdatedAgent.spec.ts b/site/e2e/tests/outdatedAgent.spec.ts index a4e42e62ec725..422074d92e341 100644 --- a/site/e2e/tests/outdatedAgent.spec.ts +++ b/site/e2e/tests/outdatedAgent.spec.ts @@ -17,7 +17,7 @@ const agentVersion = "v2.12.1"; test.beforeEach(({ page }) => beforeCoderTest(page)); test(`ssh with agent ${agentVersion}`, async ({ page }) => { - test.setTimeout(40_000); // This is a slow test, 20s may not be enough on Mac. + test.setTimeout(60_000); const token = randomUUID(); const template = await createTemplate(page, { diff --git a/site/e2e/tests/outdatedCLI.spec.ts b/site/e2e/tests/outdatedCLI.spec.ts index 22301483e0977..3470367c63546 100644 --- a/site/e2e/tests/outdatedCLI.spec.ts +++ b/site/e2e/tests/outdatedCLI.spec.ts @@ -17,6 +17,8 @@ const clientVersion = "v2.8.0"; test.beforeEach(({ page }) => beforeCoderTest(page)); test(`ssh with client ${clientVersion}`, async ({ page }) => { + test.setTimeout(60_000); + const token = randomUUID(); const template = await createTemplate(page, { apply: [ diff --git a/site/e2e/tests/webTerminal.spec.ts b/site/e2e/tests/webTerminal.spec.ts index 6db4363a4e360..fc6baec7daa67 100644 --- a/site/e2e/tests/webTerminal.spec.ts +++ b/site/e2e/tests/webTerminal.spec.ts @@ -12,6 +12,8 @@ import { beforeCoderTest } from "../hooks"; test.beforeEach(({ page }) => beforeCoderTest(page)); test("web terminal", async ({ context, page }) => { + test.setTimeout(75_000); + const token = randomUUID(); const template = await createTemplate(page, { apply: [ From e9fbfcc45b996e7e55f1f7a06cdedeb595b4f717 Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:41:52 -0500 Subject: [PATCH 42/42] chore(docs): include custom roles examples and mention of password reset (#15294) Added example custom roles for admin inspiration, mention of headless authentication use case, and user-activated password reset. --- docs/admin/users/groups-roles.md | 19 +++++++++++++++++++ docs/admin/users/index.md | 7 ++++++- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/docs/admin/users/groups-roles.md b/docs/admin/users/groups-roles.md index 17c0fc8b5b8b9..e40efb0bd5a10 100644 --- a/docs/admin/users/groups-roles.md +++ b/docs/admin/users/groups-roles.md @@ -42,6 +42,25 @@ in the dashboard under **Organizations** -> **My Organization** -> **Roles**. 
![Custom roles](../../images/admin/users/roles/custom-roles.PNG) +### Example roles + +- The `Banking Compliance Auditor` custom role cannot create workspaces, but can + read template source code and view audit logs +- The `Organization Lead` role can access user workspaces for troubleshooting + purposes, but cannot edit templates +- The `Platform Member` role cannot edit or create workspaces as they are + created via a third-party system + +Custom roles can also be applied to +[headless user accounts](./headless-auth.md): + +- A `Health Check` role can view deployment status but cannot create workspaces, + manage templates, or view users +- A `CI` role can update manage templates but cannot create workspaces or view + users + +### Creating custom roles + Clicking "Create custom role" opens a UI to select the desired permissions for a given persona. diff --git a/docs/admin/users/index.md b/docs/admin/users/index.md index 6b500ea68ac66..a00030a514f05 100644 --- a/docs/admin/users/index.md +++ b/docs/admin/users/index.md @@ -143,7 +143,12 @@ Confirm the user activation by typing **yes** and pressing **enter**. ## Reset a password -To reset a user's via the web UI: +As of 2.17.0, users can reset their password independently on the login screen +by clicking "Forgot Password." This feature requires +[email notifications](../monitoring/notifications/index.md#smtp-email) to be +configured on the deployment. + +To reset a user's password as an administrator via the web UI: 1. Go to **Users**. 2. Find the user whose password you want to reset, click the vertical ellipsis