diff --git a/.github/workflows/sync-staging-repo-files.yml b/.github/workflows/sync-staging-repo-files.yml
new file mode 100644
index 000000000000..136706073db9
--- /dev/null
+++ b/.github/workflows/sync-staging-repo-files.yml
@@ -0,0 +1,164 @@
+# **What it does**: Synchronizes each of the github/docs-staging-X repositories with the latest build scripts, workflows, and other files from src/deployments/staging.
+# **Why we have it**: We want to centralize build config in src/deployments/staging for use across multiple repos.
+# **Who does it impact**: Docs engineering, and potentially content writers.
+
+name: Sync Staging Repo Files
+
+on:
+ push:
+ branches: [main]
+ paths:
+ - 'src/deployments/staging/build-scripts/*.sh'
+ - 'src/deployments/staging/.github/**'
+ - 'src/deployments/staging/Dockerfile'
+ - 'src/deployments/staging/.env.example'
+ - 'src/deployments/staging/README.example.md'
+ - 'src/deployments/staging/config/**'
+
+permissions:
+ contents: write
+
+jobs:
+ # Determine how many staging repos we have and generate a matrix with repo and index
+ generate-matrix:
+ if: github.repository == 'github/docs-internal'
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout source repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 1 # Only need latest commit for config.json
+
+ - name: Read configuration
+ id: read-config
+ run: |
+ sudo apt-get update && sudo apt-get install -y jq
+ NUMBER_OF_REPOS=$(jq '.number_of_staging_repos' src/deployments/staging/config.json)
+ if ! [[ "$NUMBER_OF_REPOS" =~ ^[0-9]+$ ]]; then
+ echo "Invalid number_of_staging_repos in config.json: $NUMBER_OF_REPOS"
+ exit 1
+ fi
+ echo "number_of_repos=$NUMBER_OF_REPOS" >> $GITHUB_OUTPUT
+
+ - name: Generate repository list with indices
+ id: generate-repos
+ run: |
+ NUMBER_OF_REPOS=${{ steps.read-config.outputs.number_of_repos }}
+ repos=()
+ # Indices run from 0 to NUMBER_OF_REPOS - 1
+ for i in $(seq 0 $((NUMBER_OF_REPOS - 1))); do
+ repos+=("{\"repo\": \"github/docs-staging-$i\", \"index\": $i}")
+ done
+ # Use compact output (-c) so the JSON fits on a single $GITHUB_OUTPUT line
+ json_repos=$(printf '%s\n' "${repos[@]}" | jq -s -c .)
+ echo "repos=$json_repos" >> $GITHUB_OUTPUT
+
+ - name: Set matrix output with repo and index
+ id: set-matrix
+ run: |
+ repos='${{ steps.generate-repos.outputs.repos }}'
+ echo "matrix={\"include\": $repos}" >> $GITHUB_OUTPUT
+
+ - uses: ./.github/actions/slack-alert
+ if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
+ with:
+ slack_channel_id: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
+ slack_token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
+
+ sync:
+ if: github.repository == 'github/docs-internal'
+ needs: generate-matrix
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
+ steps:
+ - name: Checkout source repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 0
+
+ - name: Checkout target repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ repository: ${{ matrix.repo }}
+ token: ${{ secrets.DOCS_BOT_PAT_READPUBLICKEY }}
+ path: target_repo
+ fetch-depth: 0
+
+ - name: Synchronize files to target repo
+ run: |
+ # Create necessary directories if they don't exist
+ mkdir -p target_repo/build-scripts
+ mkdir -p target_repo/.github/workflows
+ mkdir -p target_repo/config
+
+ # Copy build scripts
+ cp src/deployments/staging/build-scripts/*.sh target_repo/build-scripts/ || true
+
+ # Copy .github directory
+ cp -r src/deployments/staging/.github target_repo/
+
+ # Copy config files
+ cp -r src/deployments/staging/config/* target_repo/config/ || true
+
+ # Overwrite Dockerfile
+ cp src/deployments/staging/Dockerfile target_repo/Dockerfile
+
+ # Conditional copy for .env if not present
+ if [ ! -f target_repo/.env ]; then
+ cp src/deployments/staging/.env.example target_repo/.env
+ fi
+
+ # Conditional copy for README.md if not present
+ if [ ! -f target_repo/README.md ]; then
+ cp src/deployments/staging/README.example.md target_repo/README.md
+ fi
+
+ - name: Install jq
+ run: sudo apt-get update && sudo apt-get install -y jq
+
+ - name: Replace template variables
+ run: |
+ # Determine which values to use based on the index
+ INDEX=${{ matrix.index }}
+
+ if [ "$INDEX" -eq 0 ]; then
+ DOMAIN=$(jq -r '.server_domain_name.internal' src/deployments/staging/config.json)
+ LOADBALANCER=$(jq -r '.load_balancer_type.internal' src/deployments/staging/config.json)
+ elif [ "$INDEX" -eq 1 ]; then
+ DOMAIN=$(jq -r '.server_domain_name.external' src/deployments/staging/config.json)
+ LOADBALANCER=$(jq -r '.load_balancer_type.external' src/deployments/staging/config.json)
+ else
+ DOMAIN=$(jq -r '.server_domain_name["docs-staging-x"]' src/deployments/staging/config.json)
+ LOADBALANCER=$(jq -r '.load_balancer_type["docs-staging-x"]' src/deployments/staging/config.json)
+
+ # Replace {{x}} in the domain variable with the current index
+ DOMAIN=$(echo "$DOMAIN" | sed "s/{{x}}/$INDEX/g")
+ fi
+
+ # Perform replacements in target_repo files
+ # Replace the server_domain_name and load_balancer_type
+ find target_repo -type f -exec sed -i "s|{{server_domain_name}}|$DOMAIN|g" {} +
+ find target_repo -type f -exec sed -i "s|{{load_balancer_type}}|$LOADBALANCER|g" {} +
+
+ # If any files still contain {{x}}, replace them with the current index
+ find target_repo -type f -exec sed -i "s/{{x}}/$INDEX/g" {} +
+
+ - name: Commit and push changes
+ run: |
+ cd target_repo
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git add .
+ # If there are changes, commit and push
+ if ! git diff --cached --quiet; then
+ git commit -m "Synchronize files from source repository with index ${{ matrix.index }}"
+ git push
+ fi
+
+ - uses: ./.github/actions/slack-alert
+ if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
+ with:
+ slack_channel_id: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
+ slack_token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
diff --git a/.github/workflows/update-docs-staging-x-repo.yml b/.github/workflows/update-docs-staging-x-repo.yml
new file mode 100644
index 000000000000..4d9693d5c3e4
--- /dev/null
+++ b/.github/workflows/update-docs-staging-x-repo.yml
@@ -0,0 +1,119 @@
+# **What it does**: Triggers a repo dispatch event when pushing to a `docs-staging-x` branch
+# or when a PR is labeled with `docs-staging-x`. The repo dispatch updates the corresponding
+# docs-staging-x repo with the latest commit SHA, which triggers a deployment.
+#
+# Note: This does not work for docs-staging-{0/1} (review servers); updates to those are
+# handled in the `update-review-servers-on-code-push.yml` workflow.
+#
+# **Why we have it**: Makes staging deployments easy
+# **Who does it impact**: Anyone trying to deploy a staging branch, both Docs Content and Docs Engineering
+
+name: Update docs-staging-x
+
+on:
+ push:
+ branches:
+ - 'docs-staging-[0-9]*'
+ pull_request:
+ types: [labeled]
+
+permissions:
+ contents: read
+
+jobs:
+ dispatch-sha:
+ if: github.repository == 'github/docs-internal'
+ runs-on: ubuntu-latest
+
+ steps:
+ # Needed because we call a composite action (Slack alert)
+ - name: Checkout source repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 1 # Only need latest commit
+
+ - name: Determine target staging repo
+ id: determine_repo
+ run: |
+ # Determine the event type
+ EVENT_TYPE="${{ github.event_name }}"
+
+ SHOULD_DISPATCH="false"
+ if [ "$EVENT_TYPE" = "push" ]; then
+ # Triggered by a push event
+ BRANCH_NAME=${GITHUB_REF#refs/heads/}
+ echo "Triggered by push event on branch: $BRANCH_NAME"
+
+ # Extract the staging number from branch name
+ if [[ "$BRANCH_NAME" =~ ^docs-staging-([0-9]+)$ ]]; then
+ STAGING_NUMBER="${BASH_REMATCH[1]}"
+ else
+ echo "Branch name does not match the required pattern docs-staging-X."
+ exit 1
+ fi
+
+ # Get the commit SHA from the push event
+ COMMIT_SHA="${GITHUB_SHA}"
+
+ elif [ "$EVENT_TYPE" = "pull_request" ]; then
+ # Triggered by a PR labeled event
+ LABEL_NAME="${{ github.event.label.name }}"
+ echo "Triggered by PR labeled event with label: $LABEL_NAME"
+
+ if [[ "$LABEL_NAME" =~ ^docs-staging-([0-9]+)$ ]]; then
+ STAGING_NUMBER="${BASH_REMATCH[1]}"
+ else
+ echo "Label does not match the required pattern docs-staging-X."
+ # Do not dispatch if it doesn't match
+ echo "should_dispatch=false" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ # Get the commit SHA from the pull request head
+ COMMIT_SHA="${{ github.event.pull_request.head.sha }}"
+
+ else
+ echo "Event type $EVENT_TYPE not supported."
+ echo "should_dispatch=false" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ echo "Staging Number: $STAGING_NUMBER"
+
+ # Check if staging number is 0 or 1
+ if [ "$STAGING_NUMBER" = "0" ] || [ "$STAGING_NUMBER" = "1" ]; then
+ echo "Staging number $STAGING_NUMBER is reserved."
+ echo "Review server repos are handled in the \`update-review-servers-on-code-push.yml\` repository."
+ echo "should_dispatch=false" >> $GITHUB_OUTPUT
+ exit 0
+ fi
+
+ TARGET_REPO="docs-staging-$STAGING_NUMBER"
+ echo "Target Repository: $TARGET_REPO"
+ SHOULD_DISPATCH="true"
+
+ # Set outputs
+ echo "target_repo=$TARGET_REPO" >> $GITHUB_OUTPUT
+ echo "commit_sha=$COMMIT_SHA" >> $GITHUB_OUTPUT
+ echo "should_dispatch=$SHOULD_DISPATCH" >> $GITHUB_OUTPUT
+
+ - name: Dispatch repository dispatch event to staging repo
+ if: steps.determine_repo.outputs.should_dispatch == 'true'
+ env:
+ REPO_DISPATCH_TOKEN: ${{ secrets.DOCS_BOT_PAT_WORKFLOW }}
+ TARGET_OWNER: github
+ TARGET_REPO: ${{ steps.determine_repo.outputs.target_repo }}
+ EVENT_TYPE: update-sha
+ SHA: ${{ steps.determine_repo.outputs.commit_sha }}
+ run: |
+ curl -X POST \
+ -H "Accept: application/vnd.github.v3+json" \
+ -H "Authorization: token $REPO_DISPATCH_TOKEN" \
+ https://api.github.com/repos/$TARGET_OWNER/$TARGET_REPO/dispatches \
+ -d "{\"event_type\":\"$EVENT_TYPE\",\"client_payload\":{\"SHA\":\"$SHA\"}}"
+
+ - uses: ./.github/actions/slack-alert
+ if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
+ with:
+ slack_channel_id: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
+ slack_token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
diff --git a/.github/workflows/update-review-servers-on-code-push.yml b/.github/workflows/update-review-servers-on-code-push.yml
new file mode 100644
index 000000000000..ff2032eddc1b
--- /dev/null
+++ b/.github/workflows/update-review-servers-on-code-push.yml
@@ -0,0 +1,67 @@
+# **What it does**: Triggers a repo dispatch event when a code change is pushed to `main`.
+# It dispatches the latest SHA to both review server repos, `docs-staging-0` and `docs-staging-1`.
+#
+# Note: We only dispatch on code changes to prevent unnecessary deployments since content changes
+# won't affect the review servers.
+#
+# **Why we have it**: Keeps the review servers up-to-date with the latest code changes
+# **Who does it impact**: Docs Content and Docs Engineering
+
+name: Update review servers on code push
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - 'src/**'
+ - 'package.json'
+ - 'tsconfig.json'
+ - 'next.config.js'
+
+permissions:
+ contents: read
+
+jobs:
+ dispatch-sha:
+ if: github.repository == 'github/docs-internal'
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ target_repo: [docs-staging-0, docs-staging-1]
+
+ steps:
+ # Needed because we call a composite action (Slack alert)
+ - name: Checkout source repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 1 # Only need latest commit
+
+ - name: Determine commit SHA and dispatch condition
+ id: determine_repo
+ run: |
+ echo "commit_sha=${GITHUB_SHA}" >> $GITHUB_OUTPUT
+ # Since this workflow only runs when code changes occur (due to path filters),
+ # we can always set should_dispatch to true.
+ echo "should_dispatch=true" >> $GITHUB_OUTPUT
+
+ - name: Dispatch repository dispatch event to staging repos
+ if: steps.determine_repo.outputs.should_dispatch == 'true'
+ env:
+ REPO_DISPATCH_TOKEN: ${{ secrets.DOCS_BOT_PAT_WORKFLOW }}
+ TARGET_OWNER: github
+ TARGET_REPO: ${{ matrix.target_repo }}
+ EVENT_TYPE: update-sha
+ SHA: ${{ steps.determine_repo.outputs.commit_sha }}
+ run: |
+ curl -X POST \
+ -H "Accept: application/vnd.github.v3+json" \
+ -H "Authorization: token $REPO_DISPATCH_TOKEN" \
+ https://api.github.com/repos/$TARGET_OWNER/$TARGET_REPO/dispatches \
+ -d "{\"event_type\":\"$EVENT_TYPE\",\"client_payload\":{\"SHA\":\"$SHA\"}}"
+
+ - uses: ./.github/actions/slack-alert
+ if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
+ with:
+ slack_channel_id: ${{ secrets.DOCS_ALERTS_SLACK_CHANNEL_ID }}
+ slack_token: ${{ secrets.SLACK_DOCS_BOT_TOKEN }}
diff --git a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/adding-an-email-address-to-your-github-account.md b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/adding-an-email-address-to-your-github-account.md
index f1c5c01c5dfd..b4baa9dfde01 100644
--- a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/adding-an-email-address-to-your-github-account.md
+++ b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/adding-an-email-address-to-your-github-account.md
@@ -1,6 +1,6 @@
---
title: Adding an email address to your GitHub account
-intro: '{% data variables.product.product_name %} allows you to add as many email addresses to your account as you like. If you set an email address in your local Git configuration, you will need to add it to your account settings in order to connect your commits to your account. For more information about your email address and commits, see "[Setting your commit email address](/articles/setting-your-commit-email-address/)."'
+intro: '{% data variables.product.product_name %} allows you to add as many email addresses to your account as you like. If you set an email address in your local Git configuration, you will need to add it to your account settings in order to connect your commits to your account. For more information about your email address and commits, see [Setting your commit email address](/articles/setting-your-commit-email-address/).'
redirect_from:
- /articles/adding-an-email-address-to-your-github-account
- /github/setting-up-and-managing-your-github-user-account/adding-an-email-address-to-your-github-account
diff --git a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-membership-in-organizations/viewing-peoples-roles-in-an-organization.md b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-membership-in-organizations/viewing-peoples-roles-in-an-organization.md
index 15ef0ddab8f1..aba42992ca38 100644
--- a/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-membership-in-organizations/viewing-peoples-roles-in-an-organization.md
+++ b/content/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-your-membership-in-organizations/viewing-peoples-roles-in-an-organization.md
@@ -1,6 +1,6 @@
---
title: Viewing people's roles in an organization
-intro: 'You can view a list of the people in your organization and filter by their role. For more information on organization roles, see "[Roles in an organization](/organizations/managing-peoples-access-to-your-organization-with-roles/roles-in-an-organization)."'
+intro: 'You can view a list of the people in your organization and filter by their role. For more information on organization roles, see [Roles in an organization](/organizations/managing-peoples-access-to-your-organization-with-roles/roles-in-an-organization).'
permissions: Organization members can see people's roles in the organization.
redirect_from:
- /articles/viewing-people-s-roles-in-an-organization
diff --git a/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-code-governance.md b/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-code-governance.md
index d6bb07cae274..82863cbb6955 100644
--- a/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-code-governance.md
+++ b/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-code-governance.md
@@ -30,7 +30,7 @@ To learn more, see [AUTOTITLE](/repositories/configuring-branches-and-merges-in-
To import a prebuilt ruleset created by {% data variables.product.company_short %}, see [`github/ruleset-recipes`](https://github.com/github/ruleset-recipes).
{% ifversion repo-rules-management %}
-{% data reusables.repositories.import-a-ruleset-conceptual %} For more information, see "[AUTOTITLE](/organizations/managing-organization-settings/managing-rulesets-for-repositories-in-your-organization#using-ruleset-history)."
+{% data reusables.repositories.import-a-ruleset-conceptual %} For more information, see [AUTOTITLE](/organizations/managing-organization-settings/managing-rulesets-for-repositories-in-your-organization#using-ruleset-history).
{% endif %}
## How will I define where my ruleset applies?
@@ -79,7 +79,7 @@ The following are eligible for bypass access:
Select all organizations, choose a selection of existing organizations, or set a dynamic list by name. If you use {% data variables.product.prodname_emus %}, you can also choose to target all repositories owned by users in your enterprise.
-If you set a dynamic list, you'll add one or more naming patterns using `fnmatch` syntax. For example, the string `*open-source` would match any organization with a name that ends with `open-source`. For syntax details, see "[AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/creating-rulesets-for-a-repository#using-fnmatch-syntax)."
+If you set a dynamic list, you'll add one or more naming patterns using `fnmatch` syntax. For example, the string `*open-source` would match any organization with a name that ends with `open-source`. For syntax details, see [AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/creating-rulesets-for-a-repository#using-fnmatch-syntax).
### Choosing which repositories to target in your enterprise
@@ -91,7 +91,7 @@ Within the selected organizations, you can target all repositories or target a d
### Selecting branch or tag protections
-In the "Branch protections" or "Tag protections" section, select the rules you want to include in the ruleset. When you select a rule, you may be able to enter additional settings for the rule. For more information on the rules, see "[AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/available-rules-for-rulesets)"
+In the "Branch protections" or "Tag protections" section, select the rules you want to include in the ruleset. When you select a rule, you may be able to enter additional settings for the rule. For more information on the rules, see [AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/available-rules-for-rulesets)
### Adding metadata restrictions
@@ -132,7 +132,7 @@ You can grant certain roles, teams, or apps bypass permissions as well as the ab
Select all organizations, choose a selection of existing organizations, or set a dynamic list by name. If you use {% data variables.product.prodname_emus %}, you can also choose to target all repositories owned by users in your enterprise.
-If you set a dynamic list, you'll add one or more naming patterns using `fnmatch` syntax. For example, the string `*open-source` would match any organization with a name that ends with `open-source`. For syntax details, see "[AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/creating-rulesets-for-a-repository#using-fnmatch-syntax)."
+If you set a dynamic list, you'll add one or more naming patterns using `fnmatch` syntax. For example, the string `*open-source` would match any organization with a name that ends with `open-source`. For syntax details, see [AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/creating-rulesets-for-a-repository#using-fnmatch-syntax).
### Choosing which repositories to target in your enterprise
diff --git a/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/managing-policies-for-code-governance.md b/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/managing-policies-for-code-governance.md
index ea36de1701f6..84b87f29c3b1 100644
--- a/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/managing-policies-for-code-governance.md
+++ b/content/admin/enforcing-policies/enforcing-policies-for-your-enterprise/managing-policies-for-code-governance.md
@@ -32,7 +32,7 @@ You can edit a ruleset to change parts of the ruleset, such as the name, bypass
1. On the "Rulesets" page, click the name of the ruleset you want to edit.
1. Change the ruleset as required.
- For information on the available rules, see "[AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/available-rules-for-rulesets)"
+ For information on the available rules, see [AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/available-rules-for-rulesets)
1. At the bottom of the page, click **Save changes**.
diff --git a/content/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot.md b/content/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot.md
index 4b435beebeda..58a41a587a48 100644
--- a/content/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot.md
+++ b/content/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot.md
@@ -52,4 +52,4 @@ For details of how to change the model for {% data variables.product.prodname_co
## Leaving feedback
-To leave feedback about Claude 3.5 Sonnet in {% data variables.product.prodname_copilot %}, or to ask a question, see the {% data variables.product.prodname_github_community %} discussion "[Claude 3.5 Sonnet is now available to all {% data variables.product.prodname_copilot_short %} users in Public Preview](https://github.com/orgs/community/discussions/143337)."
+To leave feedback about Claude 3.5 Sonnet in {% data variables.product.prodname_copilot %}, or to ask a question, see the {% data variables.product.prodname_github_community %} discussion [Claude 3.5 Sonnet is now available to all {% data variables.product.prodname_copilot_short %} users in Public Preview](https://github.com/orgs/community/discussions/143337).
diff --git a/content/repositories/viewing-activity-and-data-for-your-repository/viewing-traffic-to-a-repository.md b/content/repositories/viewing-activity-and-data-for-your-repository/viewing-traffic-to-a-repository.md
index 4b238fd8964f..7e7d647adc1b 100644
--- a/content/repositories/viewing-activity-and-data-for-your-repository/viewing-traffic-to-a-repository.md
+++ b/content/repositories/viewing-activity-and-data-for-your-repository/viewing-traffic-to-a-repository.md
@@ -1,7 +1,7 @@
---
title: Viewing traffic to a repository
intro: 'Anyone with push access to a repository can view its traffic, including full clones (not fetches), visitors from the past 14 days, referring sites, and popular content in the traffic graph.'
-product: 'This repository insights graph is available in public repositories with {% data variables.product.prodname_free_user %} and {% data variables.product.prodname_free_team %} for organizations, and in public and private repositories with {% data variables.product.prodname_pro %}, {% data variables.product.prodname_team %}, and {% data variables.product.prodname_ghe_cloud %}.{% ifversion fpt %} For more information, see "[About repository graphs](/articles/about-repository-graphs)" and "[{% data variables.product.prodname_dotcom %}''s products](/articles/github-s-products)."{% endif %}'
+product: 'This repository insights graph is available in public repositories with {% data variables.product.prodname_free_user %} and {% data variables.product.prodname_free_team %} for organizations, and in public and private repositories with {% data variables.product.prodname_pro %}, {% data variables.product.prodname_team %}, and {% data variables.product.prodname_ghe_cloud %}.{% ifversion fpt %} For more information, see [About repository graphs](/articles/about-repository-graphs) and [{% data variables.product.prodname_dotcom %}''s products](/articles/github-s-products).{% endif %}'
redirect_from:
- /articles/viewing-traffic-to-a-repository
- /github/visualizing-repository-data-with-graphs/viewing-traffic-to-a-repository
diff --git a/data/reusables/contributing/content-linter-rules.md b/data/reusables/contributing/content-linter-rules.md
index 82d8de234160..f471bf450f22 100644
--- a/data/reusables/contributing/content-linter-rules.md
+++ b/data/reusables/contributing/content-linter-rules.md
@@ -65,4 +65,5 @@
| GHD039 | expiring-soon | Content that expires soon should be proactively addressed. | warning | expired |
| [GHD040](https://github.com/github/docs/blob/main/src/content-linter/README.md) | table-liquid-versioning | Tables must use the correct liquid versioning format | error | tables |
| GHD041 | third-party-action-pinning | Code examples that use third-party actions must always pin to a full length commit SHA | error | feature, actions |
-| GHD042 | liquid-tag-whitespace | Liquid tags should start and end with one whitespace. Liquid tag arguments should be separated by only one whitespace. | error | liquid, format |
\ No newline at end of file
+| GHD042 | liquid-tag-whitespace | Liquid tags should start and end with one whitespace. Liquid tag arguments should be separated by only one whitespace. | error | liquid, format |
+| GHD043 | link-quotation | Internal link titles must not be surrounded by quotations | error | links, url |
\ No newline at end of file
diff --git a/src/content-linter/lib/helpers/utils.js b/src/content-linter/lib/helpers/utils.js
index c6538fe1f98d..25d009c0df50 100644
--- a/src/content-linter/lib/helpers/utils.js
+++ b/src/content-linter/lib/helpers/utils.js
@@ -50,6 +50,11 @@ export function doesStringEndWithPeriod(text) {
return /^.*\.['"]?$/.test(text)
}
+export function quotePrecedesLinkOpen(text) {
+ if (!text) return false
+ return text.endsWith('"') || text.endsWith("'")
+}
+
// Filters a list of tokens by token type only when they match
// a specific token type order.
// For example, if a list of tokens contains:
diff --git a/src/content-linter/lib/linting-rules/index.js b/src/content-linter/lib/linting-rules/index.js
index 6c37c8f6016a..0c8ab9b0c2d4 100644
--- a/src/content-linter/lib/linting-rules/index.js
+++ b/src/content-linter/lib/linting-rules/index.js
@@ -32,6 +32,7 @@ import { expiredContent, expiringSoon } from './expired-content.js'
import { tableLiquidVersioning } from './table-liquid-versioning.js'
import { thirdPartyActionPinning } from './third-party-action-pinning.js'
import { liquidTagWhitespace } from './liquid-tag-whitespace.js'
+import { linkQuotation } from './link-quotation.js'
const noDefaultAltText = markdownlintGitHub.find((elem) =>
elem.names.includes('no-default-alt-text'),
@@ -79,5 +80,6 @@ export const gitHubDocsMarkdownlint = {
tableLiquidVersioning,
thirdPartyActionPinning,
liquidTagWhitespace,
+ linkQuotation,
],
}
diff --git a/src/content-linter/lib/linting-rules/link-quotation.js b/src/content-linter/lib/linting-rules/link-quotation.js
new file mode 100644
index 000000000000..6d6db6bcf8cd
--- /dev/null
+++ b/src/content-linter/lib/linting-rules/link-quotation.js
@@ -0,0 +1,68 @@
+import { addError, filterTokens } from 'markdownlint-rule-helpers'
+import { getRange, quotePrecedesLinkOpen } from '../helpers/utils.js'
+import { escapeRegExp } from 'lodash-es'
+
+export const linkQuotation = {
+ names: ['GHD043', 'link-quotation'],
+ description: 'Internal link titles must not be surrounded by quotations',
+ tags: ['links', 'url'],
+ parser: 'markdownit',
+ function: (params, onError) => {
+ filterTokens(params, 'inline', (token) => {
+ const { children } = token
+ let previous_child = children[0]
+ let inLinkWithPrecedingQuotes = false
+ let linkUrl = ''
+ let content = []
+ let line = ''
+ for (let i = 1; i < children.length; i++) {
+ const child = children[i]
+ if (child.type === 'link_open' && quotePrecedesLinkOpen(previous_child.content)) {
+ inLinkWithPrecedingQuotes = true
+ linkUrl = escapeRegExp(child.attrs[0][1])
+ line = child.line
+ } else if (inLinkWithPrecedingQuotes && child.type === 'text') {
+ content.push(escapeRegExp(child.content.trim()))
+ } else if (inLinkWithPrecedingQuotes && child.type === 'code_inline') {
+ content.push('`' + escapeRegExp(child.content.trim()) + '`')
+ } else if (child.type === 'link_close') {
+ const title = content.join(' ')
+ // Match a quoted link, e.g. "[Title](/url).", optionally followed by a Liquid tag and trailing punctuation
+ const regex = new RegExp(`"\\[${title}\\]\\(${linkUrl}\\)({%.*%})?(!|\\.|\\?|,)?"`)
+ if (regex.test(child.line)) {
+ const match = child.line.match(regex)[0]
+ const range = getRange(child.line, match)
+ let newLine = match
+ if (newLine.startsWith('"')) {
+ newLine = newLine.slice(1)
+ }
+ if (newLine.endsWith('"')) {
+ newLine = newLine.slice(0, -1)
+ }
+ if (newLine.endsWith('".')) {
+ newLine = newLine.slice(0, -2) + '.'
+ }
+ const lineNumber = child.lineNumber
+ addError(
+ onError,
+ lineNumber,
+ 'Remove quotes surrounding the link title.',
+ match,
+ range,
+ {
+ lineNumber,
+ editColumn: range[0],
+ deleteCount: range[1],
+ insertText: newLine,
+ },
+ )
+ }
+ inLinkWithPrecedingQuotes = false
+ content = []
+ line = ''
+ linkUrl = ''
+ }
+ previous_child = child
+ }
+ })
+ },
+}
diff --git a/src/content-linter/style/github-docs.js b/src/content-linter/style/github-docs.js
index ca9119db3510..6c92e6154f40 100644
--- a/src/content-linter/style/github-docs.js
+++ b/src/content-linter/style/github-docs.js
@@ -167,6 +167,12 @@ const githubDocsConfig = {
'partial-markdown-files': true,
'yml-files': true,
},
+ 'link-quotation': {
+ // GHD043
+ severity: 'error',
+ 'partial-markdown-files': true,
+ 'yml-files': true,
+ },
}
export const githubDocsFrontmatterConfig = {
@@ -210,6 +216,11 @@ export const githubDocsFrontmatterConfig = {
severity: 'warning',
'partial-markdown-files': false,
},
+ 'link-quotation': {
+ // GHD043
+ severity: 'error',
+ 'partial-markdown-files': false,
+ },
}
// Configures rules from the `github/markdownlint-github` repo
diff --git a/src/content-linter/tests/unit/link-quotation.js b/src/content-linter/tests/unit/link-quotation.js
new file mode 100644
index 000000000000..d76ef0266d34
--- /dev/null
+++ b/src/content-linter/tests/unit/link-quotation.js
@@ -0,0 +1,36 @@
+import { describe, expect, test } from 'vitest'
+
+import { runRule } from '../../lib/init-test.js'
+import { linkQuotation } from '../../lib/linting-rules/link-quotation.js'
+
+describe(linkQuotation.names.join(' - '), () => {
+ test('links that are formatted correctly should not generate an error', async () => {
+ const markdown = [
+ 'Random stuff [A title](./image.png)',
+ '"This is a direct quote" [A title](./image.png)',
+ ].join('\n')
+ const result = await runRule(linkQuotation, { strings: { markdown } })
+ const errors = result.markdown
+ expect(errors.length).toBe(0)
+ })
+
+ test('links with quotes around them should error out', async () => {
+ const markdown = [
+ 'Random stuff "[A title](./image.png)."',
+ 'Random stuff "[A title](./image.png)?"',
+ 'Random stuff "[A title](./image.png)!"',
+ 'Random stuff "[A title](./image.png)".',
+ 'Random stuff "[A title](./image.png)"?',
+ 'Random stuff "[A title](./image.png)"!',
+ 'See "[AUTOTITLE](/foo/bar){% ifversion fpt %}."{% elsif ghes or ghec %}" and "[AUTOTITLE](/foo/bar)."{% endif %}',
+ 'See "[AUTOTITLE](/foo/bar)," "[AUTOTITLE](/foo/bar2)," "[AUTOTITLE](/foo/bar3)," and "[AUTOTITLE](/foo/bar4)."',
+ 'See "[Anchor link](#anchor-link)."',
+ ].join('\n')
+ const result = await runRule(linkQuotation, { strings: { markdown } })
+ const errors = result.markdown
+ expect(errors.length).toBe(13)
+ expect(errors[0].errorRange).toEqual([14, 25])
+ expect(errors[0].fixInfo.insertText).toBe('[A title](./image.png).')
+ expect(errors[1].fixInfo.insertText).toBe('[A title](./image.png)?')
+ })
+})
diff --git a/src/deployments/staging/.env.example b/src/deployments/staging/.env.example
new file mode 100644
index 000000000000..fedee045f613
--- /dev/null
+++ b/src/deployments/staging/.env.example
@@ -0,0 +1,27 @@
+# The .env file in every docs-staging-X repo can be adjusted freely and is not synchronized
+
+# - - -
+# Unique per staging server
+# - - -
+# The name of the staging branch (should be the same as the repo name except for the review server)
+STAGING_BRANCH=docs-staging-{{x}}
+# Required for identifying the image in datadog metrics
+MODA_APP_NAME=docs-staging-{{x}}
+# The most recent SHA of the STAGING_BRANCH
+SHA={{sha}}
+
+# - - -
+# Unique per review server
+# - - -
+# Empty for regular staging servers, 'internal' or 'external' for review server
+REVIEW_SERVER=
+
+# - - -
+# Shared defaults
+# - - -
+NODE_ENV=production
+PORT=4000
+ENABLED_LANGUAGES='en,zh,es,pt,ru,ja,fr,de,ko'
+RATE_LIMIT_MAX='21'
+# Moda uses a non-default port for sending datadog metrics
+DD_DOGSTATSD_PORT='28125'
diff --git a/src/deployments/staging/.github/workflows/deploy-on-repo-dispatch.yml b/src/deployments/staging/.github/workflows/deploy-on-repo-dispatch.yml
new file mode 100644
index 000000000000..6d22400ee493
--- /dev/null
+++ b/src/deployments/staging/.github/workflows/deploy-on-repo-dispatch.yml
@@ -0,0 +1,72 @@
+# This file is the source of truth for all `docs-staging-X` repos. The copy of this workflow should be synchronized with each staging repo.
+# It triggers on the update-sha repository dispatch event, which is dispatched whenever a `docs-staging-X` branch is pushed to in `docs-internal`.
+# This workflow updates the SHA in the staging repo's `.env` to the SHA sent in the dispatch event, which points to the latest commit on the `docs-staging-X` branch.
+# The merge should trigger an automatic Moda deploy using the contents pulled from the SHA, which points to a branch in `docs-internal`.
+
+name: Deploy on repo dispatch
+
+on:
+ repository_dispatch:
+ # This event is dispatched whenever a `docs-staging-X` branch is pushed to
+ types: [update-sha]
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ update-sha:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout Repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 0 # Ensure full history for PR creation
+
+ # Extract SHA from the dispatch payload
+ - name: Set SHA from Payload
+ id: set_sha
+ run: echo "SHA=${{ github.event.client_payload.SHA }}" >> $GITHUB_ENV
+
+ # Update the .env file with the new SHA
+ - name: Update .env File
+ run: |
+ if grep -q "^SHA=" .env; then
+ sed -i "s/^SHA=.*/SHA=${SHA}/" .env
+ else
+ echo "SHA=${SHA}" >> .env
+
+ - name: Commit Changes to new branch
+ run: |
+ BRANCH_NAME=update-sha-${{ github.run_id }}
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+ git checkout -b $BRANCH_NAME
+ git add .env
+ git commit -m "Update SHA to ${{ env.SHA }}"
+ echo "BRANCH_NAME=$BRANCH_NAME" >> $GITHUB_ENV
+
+ - name: Push Branch
+ run: git push origin ${{ env.BRANCH_NAME }}
+
+ - name: Create Pull Request
+ id: create_pr
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ # `gh pr create` prints the new PR's URL to stdout
+ PR_URL=$(gh pr create \
+ --title "Update SHA to ${{ env.SHA }}" \
+ --body "This PR updates the SHA in the \`.env\` file to \`${{ env.SHA }}\`." \
+ --base main \
+ --head ${{ env.BRANCH_NAME }})
+ echo "PR_URL=$PR_URL" >> $GITHUB_ENV
+
+ - name: Merge Pull Request
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ PR_NUMBER=$(gh pr view $PR_URL --json number --jq .number)
+ gh pr merge "$PR_NUMBER" --squash --auto --delete-branch
diff --git a/src/deployments/staging/Dockerfile b/src/deployments/staging/Dockerfile
new file mode 100644
index 000000000000..dba2321b7ea5
--- /dev/null
+++ b/src/deployments/staging/Dockerfile
@@ -0,0 +1,137 @@
+# --------------------------------------------------------------------------------
+# BASE IMAGE
+# --------------------------------------------------------------------------------
+# To update the sha:
+# https://github.com/github/gh-base-image/pkgs/container/gh-base-image%2Fgh-base-noble
+FROM ghcr.io/github/gh-base-image/gh-base-noble:20250108-185521-gcd4825276 AS base
+
+# Install git for cloning docs-early-access & translations repos
+# Install curl for determining the early access branch
+RUN apt-get -qq update && apt-get -qq install --no-install-recommends git curl
+
+# Install Node.js latest LTS
+# https://github.com/nodejs/release#release-schedule
+# Ubuntu's apt-get install nodejs is _very_ outdated
+RUN curl -sL https://deb.nodesource.com/setup_22.x | bash -
+RUN apt-get install -y nodejs
+RUN node --version
+
+# This directory is owned by the node user
+RUN useradd -ms /bin/bash node
+ARG APP_HOME=/home/node/app
+RUN mkdir -p $APP_HOME && chown -R node:node $APP_HOME
+WORKDIR $APP_HOME
+
+# Switch to root to ensure we have permissions to copy, chmod, and install
+USER root
+
+# Copy in scripts and .env
+COPY .env .
+COPY build-scripts/*.sh ./build-scripts/
+
+# Make scripts executable
+RUN chmod +x build-scripts/*.sh
+
+# Use the mounted --secret to:
+# - 1. Fetch the docs-internal repo
+# - 2. Fetch the docs-early-access repo & override docs-internal with early access content
+# - 3. Fetch each translations repo to the repo/translations directory
+# We use --mount-type=secret to avoid the secret being copied into the image layers for security
+# The secret passed via --secret can only be used in this RUN command
+RUN --mount=type=secret,id=DOCS_BOT_PAT_READPUBLICKEY \
+ # We don't cache because Docker can't know if we need to fetch new content from remote repos
+ echo "Don't cache this step by printing date: $(date)" && \
+ . ./build-scripts/fetch-repos.sh
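+
+# For local testing only, the secret can be supplied via BuildKit (illustrative command, not part of the Moda build):
+#   docker build --secret id=DOCS_BOT_PAT_READPUBLICKEY,src=./docs-bot-pat.txt .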
+
+# Give node user access to the cloned repo & scripts
+RUN chown -R node:node $APP_HOME/repo
+RUN chown -R node:node $APP_HOME/build-scripts
+RUN chown -R node:node $APP_HOME/.env
+
+# Change back to node to make sure we don't run anything as the root user
+USER node
+
+# ---------------
+# ALL DEPS Image
+# ---------------
+FROM node:22-alpine@sha256:c13b26e7e602ef2f1074aef304ce6e9b7dd284c419b35d89fcf3cc8e44a8def9 AS all_deps
+
+ARG APP_HOME=/home/node/app
+WORKDIR $APP_HOME
+
+# Copy what is needed to run npm ci
+COPY --from=base $APP_HOME/repo/package.json $APP_HOME/repo/package-lock.json ./
+
+RUN npm ci --no-optional --registry https://registry.npmjs.org/
+
+# Sharp requires optional deps: https://github.com/lovell/sharp/issues/4001
+RUN npm install --cpu=x64 --os=linux --include=optional sharp
+
+# ---------------
+# BUILDER Image
+# ---------------
+FROM all_deps AS builder
+
+ARG APP_HOME=/home/node/app
+WORKDIR $APP_HOME
+
+# Copy what is needed to:
+# 1. Build the app
+# 2. run warmup-remotejson script
+# 3. run precompute-pageinfo script
+# Dependencies
+COPY --from=all_deps $APP_HOME/package.json ./
+COPY --from=all_deps $APP_HOME/node_modules ./node_modules
+# Content with merged early-access content
+COPY --from=base $APP_HOME/repo/content ./content
+COPY --from=base $APP_HOME/repo/data ./data
+COPY --from=base $APP_HOME/repo/assets ./assets
+# Source code
+COPY --from=base $APP_HOME/repo/src ./src
+COPY --from=base $APP_HOME/repo/next.config.js ./
+COPY --from=base $APP_HOME/repo/tsconfig.json ./
+
+# 1. Build
+RUN npm run build
+
+# 2. Warm up the remotejson cache
+RUN npm run warmup-remotejson
+
+# 3. Precompute the pageinfo cache
+RUN npm run precompute-pageinfo -- --max-versions 2
+
+# Prune deps for prod image
+RUN npm prune --production
+
+# --------------------------------------------------------------------------------
+# STAGING IMAGE
+# --------------------------------------------------------------------------------
+FROM base AS staging
+
+ARG APP_HOME=/home/node/app
+WORKDIR $APP_HOME
+
+# Copy translations and build scripts from base image
+COPY --from=base $APP_HOME/repo/translations ./translations
+COPY --from=base $APP_HOME/build-scripts ./build-scripts
+COPY --from=base $APP_HOME/.env ./
+# Copy prod dependencies from deps image
+COPY --from=all_deps $APP_HOME/node_modules ./node_modules
+# Copy built artifacts from builder image
+COPY --from=builder $APP_HOME/.next ./.next
+COPY --from=builder $APP_HOME/.remotejson-cache ./.remotejson-cache
+COPY --from=builder $APP_HOME/.pageinfo-cache.json.br* ./.pageinfo-cache.json.br
+
+# Copy source code needed to run the server
+COPY --from=builder $APP_HOME/package.json ./
+## Content
+COPY --from=builder $APP_HOME/content ./content
+COPY --from=builder $APP_HOME/data ./data
+COPY --from=builder $APP_HOME/assets ./assets
+## Code
+COPY --from=builder $APP_HOME/src ./src
+## Config
+COPY --from=builder $APP_HOME/next.config.js ./
+COPY --from=builder $APP_HOME/tsconfig.json ./
+
+CMD ["./build-scripts/server-entrypoint.sh"]
diff --git a/src/deployments/staging/README.example.md b/src/deployments/staging/README.example.md
new file mode 100644
index 000000000000..9a00b81eb635
--- /dev/null
+++ b/src/deployments/staging/README.example.md
@@ -0,0 +1,12 @@
+# Staging {{x}}
+
+This is the staging repo and corresponding Moda deployment for the GitHub Docs {{x}} staging server.
+
+> [!NOTE]
+> Do not change any file other than `.env` and `README.md` in this repo. Instead, change the files in [src/deployments/staging of docs-internal](https://github.com/github/docs-internal/tree/main/src/deployments/staging/README.md), which will cascade the updates to each `docs-staging-X` repo (apart from `README.md` and `.env`).
+
+**URL:** https://docs-staging-{{x}}.github.com
+
+**Docs**: [src/deployments/staging of docs-internal](https://github.com/github/docs-internal/tree/main/src/deployments/staging/README.md)
+
+The contents of this repo are kept in a one-directional sync with the contents of `src/deployments/staging` in the [docs-internal](https://github.com/github/docs-internal) repo, where `src/deployments/staging` from `docs-internal` is the source and this repo is the destination.
diff --git a/src/deployments/staging/README.md b/src/deployments/staging/README.md
new file mode 100644
index 000000000000..2b58c3d3f5e5
--- /dev/null
+++ b/src/deployments/staging/README.md
@@ -0,0 +1,119 @@
+# Staging Servers
+
+This directory contains the build tools, workflows, and files used to build and deploy our staging (and dedicated review) servers.
+
+For internal documentation, please see the Moda directory in the internal Docs Engineering repo.
+
+1. [Why staging servers?](#why-staging-servers)
+1. [What are staging servers?](#what-are-staging-servers)
+1. [How do staging deploys work from docs-internal?](#how-do-staging-deploys-work-from-docs-internal)
+1. [Keeping build configurations in sync](#keeping-build-configurations-in-sync)
+
+## Why staging servers?
+
+Previously, Docs had automatic preview deploys for each branch. When a PR was opened, a dedicated server was spun up from the contents of that branch.
+
+In the future, we may be able to accomplish automatic branch deploys again if Moda's features are expanded.
+
+Until then, we have staging servers that we spin up by manually pushing changes from a branch.
+
+We have 8 dedicated staging servers that developers can use to test their code changes. For purely content changes, we use the [review server](../review-server/README.md), which automatically previews content changes on a remote branch.
+
+The dedicated review servers are deployed in a similar fashion to the staging servers. The difference is that review servers have different environment variables set in `.env` and two additional secrets, such as a PAT for reading from remote repos and an access token.
+
+## What are staging servers?
+
+Each staging server requires its own repo in the `github` org in order to deploy to Moda, named `github/docs-staging-X`, where X is the number of that staging server, e.g. `github/docs-staging-0` or `github/docs-staging-1`.
+
+The URLs of the staging servers also follow this pattern, `docs-staging-x.github.net`, e.g. `docs-staging-2.github.net`.
+
+The exception is the first two, which are our review servers:
+
+- `docs-staging-0` -> https://docs-review.github.com
+- `docs-staging-1` -> https://os-docs-review.github.com
+
+Ideally, there should always be enough staging servers for each developer on the team to have a dedicated server to deploy to.
+
+So we have 8 dedicated staging servers, `docs-staging-{2-9}`:
+
+- `docs-staging-2` -> https://docs-staging-2.github.net
+- `docs-staging-3` -> https://docs-staging-3.github.net
+- etc
+- `docs-staging-9` -> https://docs-staging-9.github.net
+
+## How do staging deploys work from docs-internal?
+
+```mermaid
+sequenceDiagram
+ autonumber
+ participant DI as docs-internal
+ participant WF1 as docs-internal/update-docs-staging-x-repo.yml
+ participant WF2 as docs-staging-X/deploy-on-repo-dispatch.yml
+ participant MD as Moda
+
+ DI->DI: Developer pushes code to `docs-staging-X` branch<br>OR Developer adds `docs-staging-X` label to a PR
+ DI->WF1: Workflow trigger
+ WF1->WF1: Extract latest SHA from docs-staging-X branch that triggered event
+ WF1->WF2: Sends `repository_dispatch` event with SHA
+ note over WF2: Now we are working out of the docs-staging-X repo
+ WF2->WF2: 1. Extracts SHA from `repository_dispatch` event<br>2. Updates `.env` in docs-staging-x with SHA value<br>3. Auto-merges the PR into docs-staging-x
+ WF2->MD: Auto-merge kicks off Moda deploy
+ MD->MD: Dockerfile build clones docs-internal code from SHA target set in `.env`
+ note over MD: Deployed to<br>`docs-staging-X.github.net`
+```
+
+Whenever a developer pushes code to a staging branch in `docs-internal`, e.g. `docs-staging-2`, a pipeline begins whose final result is a staging server running the latest changes from that branch. See the above diagram, or read below for a textual explanation.
+
+The pipeline is as follows:
+
+1. Pushing to a `docs-staging-X` branch on `docs-internal` triggers the [update-docs-staging-x-repo.yml](../../.github/workflows/update-docs-staging-x-repo.yml) workflow in `docs-internal`, which fires a [repository_dispatch](https://docs.github.com/en/actions/writing-workflows/choosing-when-your-workflow-runs/events-that-trigger-workflows#repository_dispatch) event to the `docs-staging-X` repo corresponding to the `docs-staging-X` branch that was pushed to (see the usage example after this list).
+
+1. The `repository_dispatch` event will include the latest `SHA` from the `docs-staging-X` branch in its payload.
+
+1. The repo receiving the `repository_dispatch` event, `docs-staging-X`, has a `deploy-on-repo-dispatch.yml` workflow that triggers whenever a `repository_dispatch` event is received.
+
+1. `deploy-on-repo-dispatch.yml`:
+
+ 1. Extracts the `SHA` from the `repository_dispatch` payload
+ 2. Opens a PR replacing the existing `SHA` in the `.env` file in the repo with the new `SHA`
+ 3. Automatically merges the PR
+
+1. The PR merge kicks off an automatic Moda deploy for the `docs-staging-X` server.
+
+1. At build time, the [Dockerfile](./Dockerfile) clones the `SHA` from `docs-internal` and builds, runs, and deploys it to https://docs-staging-X.github.net which is only accessible behind the devvpn.
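+
+As a usage example, a developer can kick off a staging deploy for server 2 either by pushing their work to the `docs-staging-2` branch or by labeling their PR. The commands below are illustrative; the branch name and PR number are placeholders.
+
+```sh
+# Push your branch's commits to the docs-staging-2 branch to trigger the dispatch
+git push origin my-feature-branch:docs-staging-2
+
+# Or add the docs-staging-2 label to an existing PR (PR number is hypothetical)
+gh pr edit 1234 --add-label docs-staging-2
+```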
+
+## How do review server deploys work from docs-internal?
+
+The process is very similar to the process in the previous section for staging servers. The differences are as follows:
+
+1. Review servers live in:
+ 1. Repo [docs-staging-0](https://github.com/github/docs-staging-0) (internal) @ https://docs-review.github.com
+ 1. Repo [docs-staging-1](https://github.com/github/docs-staging-1) (external) @ https://os-docs-review.github.com
+
+1. When a _code_ change happens on `main`, e.g. a `.ts` or `.js` file is changed, the `update-review-servers-on-code-push.yml` workflow runs in the docs-internal repo instead of `update-docs-staging-x-repo.yml`.
+1. The `STAGING_BRANCH` in these repos is set to `main`, since they pull and run the latest code from `main`.
+1. The `SHA=` is set to the latest commit in `main` using the same process that staging servers use to deploy (repo dispatch from `docs-internal` to `docs-staging-X`).
+
+The primary reason for this is that the review servers need to be up to date with the latest _code_ changes from `docs-internal:main` so that they can accurately live preview content changes on branch targets. They don't need to be deployed each time just to preview content changes. They only need to be deployed when code changes, hence the separate process.
+
+Additionally, the review server is running the code from `main` instead of a staging branch, like `docs-staging-0`.
+
+## Keeping build configurations in sync
+
+Since we may increase or decrease the number of staging servers, repos, and branches e.g. `docs-staging-X+1`, we centralize all of the build config in the `src/deployments/staging` directory of this repo.
+
+Each of the following files and directories is synced to each of the `docs-staging-X` repos when changed, using the [sync-staging-repo-files.yml](../../.github/workflows/sync-staging-repo-files.yml) workflow.
+
+The source of truth for the number of staging repos is in [src/deployments/staging/config.json](./config.json) along with other data like the domain names of each server.
+
+- The [Dockerfile](./Dockerfile) in `src/deployments/staging/Dockerfile` is the same Dockerfile that lives in each of the `docs-staging-X` repos.
+
+- The [src/deployments/staging/.github/workflows](./.github/workflows/) workflows are the same workflows that live in each `docs-staging-X` repo's `.github/workflows/` directory.
+
+- The [build-scripts/](./build-scripts) directory lives in each staging repo's `build-scripts` directory.
+
+> [!NOTE]
+> The `.env` in each repo is unique to that repo since it stores that repo's `SHA` and, in the case of the dedicated review servers, `REVIEW_SERVER`. The Dockerfile reads the `.env` file at build time and extracts the env variables into its environment.
+
+> [!NOTE]
+> Each `docs-staging-X` repo must have `docs-bot` as a member with `write` access to enable syncing.
diff --git a/src/deployments/staging/build-scripts/clone-or-use-cached-repo.sh b/src/deployments/staging/build-scripts/clone-or-use-cached-repo.sh
new file mode 100644
index 000000000000..600de379bd98
--- /dev/null
+++ b/src/deployments/staging/build-scripts/clone-or-use-cached-repo.sh
@@ -0,0 +1,63 @@
+set -e
+
+# We use this function to use the cached version of the repo if it exists from
+# a previous Dockerfile build. Otherwise, we clone the repo and check out the
+# specified branch/SHA.
+# Arguments:
+# $1 - Repository name (for directory naming)
+# $2 - Repository URL
+# $3 - Branch to clone
+# $4 - Specific SHA to check out (optional)
+clone_or_use_cached_repo() {
+ repo_name="$1"
+ repo_url="$2"
+ branch="$3"
+ sha="$4"
+
+ echo "Processing repository '$repo_name'..."
+
+ if [ -d "$repo_name/.git" ]; then
+ echo "Repository '$repo_name' already exists. Fetching updates..."
+ cd "$repo_name"
+
+ # Fetch latest changes
+ git fetch origin "$branch"
+
+ # If a specific SHA is provided, check it out
+ if [ -n "$sha" ]; then
+ echo "Checking out SHA: $sha"
+ git checkout "$sha"
+ else
+ echo "Checking out branch: $branch"
+ git checkout "$branch"
+ git pull origin "$branch"
+ fi
+
+ cd ..
+ else
+ echo "Cloning repository '$repo_name' from branch '$branch'..."
+
+ # We use --depth 5 for the docs-internal branch we are checking out as a bit of a gamble for performance optimization.
+ # We assume that the latest changes are within the last few commits,
+ # which should always be the case with how our staging servers are built via actions.
+ # If someone manually sets `.env`, this may break the build.
+ if [ -n "$sha" ]; then
+ depth=5
+ else
+ depth=1
+ fi
+
+ git clone --depth "$depth" --branch "$branch" "https://${GITHUB_TOKEN}@github.com/github/$repo_url.git" "$repo_name"
+
+ cd "$repo_name"
+
+ if [ -n "$sha" ]; then
+ echo "Checking out SHA: $sha"
+ git checkout "$sha"
+ fi
+
+ cd ..
+ fi
+
+ echo "Repository '$repo_name' is up to date."
+}
\ No newline at end of file
diff --git a/src/deployments/staging/build-scripts/determine-early-access-branch.sh b/src/deployments/staging/build-scripts/determine-early-access-branch.sh
new file mode 100755
index 000000000000..5630064506fc
--- /dev/null
+++ b/src/deployments/staging/build-scripts/determine-early-access-branch.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env sh
+
+# These should be set already in the Dockerfile's env
+if [ -z "$GITHUB_TOKEN" ] || [ -z "$STAGING_BRANCH" ]; then
+ echo "Error: GITHUB_TOKEN and STAGING_BRANCH environment variables must be set."
+ exit 1
+fi
+
+OWNER="github"
+REPO="docs-early-access"
+BRANCH_NAME="$STAGING_BRANCH"
+API_URL="https://api.github.com/repos/${OWNER}/${REPO}/branches/${BRANCH_NAME}"
+
+fetch_branch() {
+ curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token $GITHUB_TOKEN" "$API_URL"
+}
+
+# Check branch using curl
+STATUS=$(fetch_branch)
+
+if [ "$STATUS" -eq 200 ]; then
+ EARLY_ACCESS_BRANCH="$BRANCH_NAME"
+ echo "Using docs-early-access branch '${EARLY_ACCESS_BRANCH}'"
+else
+ EARLY_ACCESS_BRANCH="main"
+ echo "Failed to get docs-early-access branch '${BRANCH_NAME}', 'main' will be used instead."
+fi
+
+# Export the branch name to be consumed by the Dockerfile
+export EARLY_ACCESS_BRANCH
diff --git a/src/deployments/staging/build-scripts/fetch-repos.sh b/src/deployments/staging/build-scripts/fetch-repos.sh
new file mode 100644
index 000000000000..9408da09cf16
--- /dev/null
+++ b/src/deployments/staging/build-scripts/fetch-repos.sh
@@ -0,0 +1,63 @@
+#!/usr/bin/env sh
+
+# Fetches and resolves docs-internal, early-access, and translations repos
+echo "Fetching and resolving docs-internal, early-access, and translations repos"
+
+# Don't show advice logging about checking out a SHA with git
+git config --global advice.detachedHead false
+
+# Exit immediately if a command exits with a non-zero status
+set -e
+
+# Import the clone_or_use_cached_repo function
+. ./build-scripts/clone-or-use-cached-repo.sh
+
+# - - - - - - - - - -
+# Read variables from .env
+# - - - - - - - - - -
+. ./build-scripts/read-dot-env.sh
+
+GITHUB_TOKEN=$(cat /run/secrets/DOCS_BOT_PAT_READPUBLICKEY)
+
+# - - - - - - - - - -
+# Get docs-internal contents
+# - - - - - - - - - -
+clone_or_use_cached_repo "repo" "docs-internal" "$STAGING_BRANCH" "$SHA"
+# Clone other repo from the root of docs-internal
+cd repo
+
+
+# - - - - - - - - - -
+# Early access
+# - - - - - - - - - -
+. ../build-scripts/determine-early-access-branch.sh
+echo "EARLY_ACCESS_BRANCH is set to '${EARLY_ACCESS_BRANCH}'"
+clone_or_use_cached_repo "docs-early-access" "docs-early-access" "$EARLY_ACCESS_BRANCH" ""
+# - - - - - - - - - -
+# !Important!
+# - - - - - - - - - -
+# Note that we use ../build-scripts/merge-early-access.sh instead of the merge-early-access script in the docs-internal repo that we checked out.
+# This is for security. We don't want to run user-supplied code for the build step.
+. ../build-scripts/merge-early-access.sh
+
+# - - - - - - - - - -
+# Clone the translations repos
+# - - - - - - - - - -
+mkdir -p translations
+cd translations
+
+# Iterate over each language
+for lang in "zh-cn" "es-es" "pt-br" "ru-ru" "ja-jp" "fr-fr" "de-de" "ko-kr"
+do
+ translations_repo="docs-internal.$lang"
+ clone_or_use_cached_repo "$lang" "$translations_repo" "main" ""
+done
+
+# Go back to the root of the docs-internal repo
+cd ..
+
+# - - - - - - - - - -
+# Cleanup
+# - - - - - - - - - -
+# Delete GITHUB_TOKEN from the environment
+unset GITHUB_TOKEN
\ No newline at end of file
diff --git a/src/deployments/staging/build-scripts/read-dot-env.sh b/src/deployments/staging/build-scripts/read-dot-env.sh
new file mode 100755
index 000000000000..fbae81db2873
--- /dev/null
+++ b/src/deployments/staging/build-scripts/read-dot-env.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env sh
+set -e
+
+if [ -f ".env" ]; then
+ export $(grep -v '^#' .env | xargs)
+fi
\ No newline at end of file
diff --git a/src/deployments/staging/build-scripts/server-entrypoint.sh b/src/deployments/staging/build-scripts/server-entrypoint.sh
new file mode 100755
index 000000000000..84bb2130b031
--- /dev/null
+++ b/src/deployments/staging/build-scripts/server-entrypoint.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# We require a server-entrypoint to set environment variables that can't be set via Docker ENV
+# This is a workaround to set vars from the .env file
+
+. ./build-scripts/read-dot-env.sh
+
+# We keep these logs here to make it clear what env vars are set in server logs
+echo "MODA_APP_NAME: $MODA_APP_NAME"
+echo "Using port: $PORT"
+echo "Using branch: $STAGING_BRANCH"
+echo "Using SHA: $SHA"
+echo "Is review server?, $REVIEW_SERVER"
+
+# Start the server
+exec ./node_modules/.bin/tsx src/frame/server.ts
diff --git a/src/deployments/staging/config.json b/src/deployments/staging/config.json
new file mode 100644
index 000000000000..af266a1ee425
--- /dev/null
+++ b/src/deployments/staging/config.json
@@ -0,0 +1,13 @@
+{
+ "number_of_staging_repos": 10,
+ "server_domain_name": {
+ "internal": "docs-review.github.com",
+ "external": "os-docs-review.github.com",
+ "docs-staging-x": "docs-staging-{{x}}.github.net"
+ },
+ "load_balancer_type": {
+ "internal": "public-external-http",
+ "external": "public-external-http",
+ "docs-staging-x": "internal-http"
+ }
+}
diff --git a/src/deployments/staging/config/kubernetes/staging/deployments/webapp.yaml b/src/deployments/staging/config/kubernetes/staging/deployments/webapp.yaml
new file mode 100644
index 000000000000..125551530bc9
--- /dev/null
+++ b/src/deployments/staging/config/kubernetes/staging/deployments/webapp.yaml
@@ -0,0 +1,54 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: webapp
+spec:
+ replicas: 2
+ selector:
+ matchLabels:
+ app: webapp
+ template:
+ metadata:
+ labels:
+ app: webapp
+ annotations:
+ # Our internal logs aren't structured so we use logfmt_sloppy to just log stdout and error
+ # See https://thehub.github.com/epd/engineering/dev-practicals/observability/logging/ for more details
+ fluentbit.io/parser: logfmt_sloppy
+ observability.github.com/splunk_index: docs-internal
+ spec:
+ dnsPolicy: Default
+ containers:
+ - name: webapp
+ image: docs-staging-{{x}}
+ ports:
+ - name: http
+ containerPort: 4000
+ protocol: TCP
+ envFrom:
+ - configMapRef:
+ name: kube-cluster-metadata
+ - secretRef:
+ name: vault-secrets
+ # Zero-downtime deploys
+ # https://thehub.github.com/engineering/products-and-services/internal/moda/feature-documentation/pod-lifecycle/#required-prestop-hook
+ # https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks
+ lifecycle:
+ preStop:
+ exec:
+ command: ['sleep', '5']
+ readinessProbe:
+ # Add delay to allow the app to initialize
+ initialDelaySeconds: 5
+ # See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-readiness-probes
+ httpGet:
+ path: /healthz
+ port: http
+ resources:
+ # These values should match the production values to ensure parity when testing
+ requests:
+ cpu: 8000m
+ memory: 10Gi
+ limits:
+ cpu: 16000m
+ memory: 14Gi
diff --git a/src/deployments/staging/config/kubernetes/staging/services/webapp.yaml b/src/deployments/staging/config/kubernetes/staging/services/webapp.yaml
new file mode 100644
index 000000000000..d9a1c4431556
--- /dev/null
+++ b/src/deployments/staging/config/kubernetes/staging/services/webapp.yaml
@@ -0,0 +1,19 @@
+apiVersion: v1
+kind: Service
+metadata:
+ name: webapp
+ labels:
+ service: webapp
+ annotations:
+ moda.github.net/domain-name: '{{server_domain_name}}'
+ moda.github.net/dns-registration-enabled: 'false'
+ moda.github.net/load-balancer-type: '{{load_balancer_type}}'
+spec:
+ ports:
+ - name: http
+ port: 4000
+ protocol: TCP
+ targetPort: http
+ selector:
+ app: webapp
+ type: LoadBalancer
diff --git a/src/deployments/staging/config/moda/build_options.yaml b/src/deployments/staging/config/moda/build_options.yaml
new file mode 100644
index 000000000000..1d3695874e45
--- /dev/null
+++ b/src/deployments/staging/config/moda/build_options.yaml
@@ -0,0 +1,10 @@
+# Array of rules to ignore
+ignored_rules: []
+# Array of files for kubeconform to ignore
+ignored_files: []
+# Array of directories to be ignored under `config/kubernetes`
+ignored_dirs: []
+# Array of fragment paths of the deployment config for schema validations to ignore
+ignored_deployment_config_fragments: []
+# Set to true to enable auto-commits for the generation of kubernetes resources from kustomize
+enable_kustomize_auto_commit: false
diff --git a/src/deployments/staging/config/moda/deployment.yaml b/src/deployments/staging/config/moda/deployment.yaml
new file mode 100644
index 000000000000..03702830b043
--- /dev/null
+++ b/src/deployments/staging/config/moda/deployment.yaml
@@ -0,0 +1,13 @@
+required_builds:
+ - docs-staging-{{x}}-moda-config-bundle / docs-staging-{{x}}-moda-config-bundle
+ - docs-staging-{{x}}-docker-image / docs-staging-{{x}}-docker-image
+ - docs-staging-{{x}}-docker-security / docs-staging-{{x}}-docker-security
+environments:
+ - name: staging
+ auto_deploy: true
+ cluster_selector:
+ profile: general
+ region: iad
+notifications:
+ slack_channels:
+ - '#docs-ops'