diff --git a/.coverage b/.coverage new file mode 100644 index 0000000..d559d53 Binary files /dev/null and b/.coverage differ diff --git a/.github/actions/collect-org-info/action.yml b/.github/actions/collect-org-info/action.yml index 87d350c..5357f38 100644 --- a/.github/actions/collect-org-info/action.yml +++ b/.github/actions/collect-org-info/action.yml @@ -53,4 +53,4 @@ runs: fi shell: bash env: - GITHUB_TOKEN: ${{ github.token }} \ No newline at end of file + GITHUB_TOKEN: ${{ github.token }} diff --git a/.github/workflows/delete-org-session.yml b/.github/workflows/delete-org-session.yml new file mode 100644 index 0000000..d73f8fe --- /dev/null +++ b/.github/workflows/delete-org-session.yml @@ -0,0 +1,47 @@ +name: Delete Org Session + +on: + workflow_call: + inputs: + environment_name: + description: "The name of the GitHub Environment to delete the org session from" + required: true + type: string + github_auth_environment: + description: "The name of the GitHub Environment to get the GitHub Access token from" + required: true + type: string + secrets: + github-token: + required: true + +jobs: + delete-org-session: + name: "Delete Org Session" + runs-on: ubuntu-latest + steps: + - name: Get GitHub Access Token + run: | + echo "Retrieving GitHub Access Token from environment: ${{ inputs.github_auth_environment }}" + GITHUB_ACCESS_TOKEN=$(gh api \ + -H "Authorization: token ${{ secrets.github-token }}" \ + "/repos/${{ github.repository }}/environments/${{ inputs.github_auth_environment }}/variables/GITHUB_ACCESS_TOKEN" \ + | jq -r '.value') + echo "GITHUB_ACCESS_TOKEN=${GITHUB_ACCESS_TOKEN}" >> $GITHUB_ENV + shell: bash + + - name: Delete Org Session + run: | + echo "Deleting org session from environment: ${{ inputs.environment_name }}" + gh api \ + -X DELETE \ + -H "Authorization: token ${{ env.GITHUB_ACCESS_TOKEN }}" \ + "/repos/${{ github.repository }}/environments/${{ inputs.environment_name }}/variables/ACCESS_TOKEN" + shell: bash + + - name: Add Job 
Summary + run: | + echo "## Org Session Deletion Summary" >> $GITHUB_STEP_SUMMARY + echo "Environment: ${{ inputs.environment_name }}" >> $GITHUB_STEP_SUMMARY + echo "Status: Org session deleted successfully" >> $GITHUB_STEP_SUMMARY + shell: bash diff --git a/.github/workflows/org-login-slack.yml b/.github/workflows/org-login-slack.yml index b5d06cc..a14958e 100644 --- a/.github/workflows/org-login-slack.yml +++ b/.github/workflows/org-login-slack.yml @@ -137,4 +137,4 @@ jobs: exit 1 fi - echo "Slack message sent successfully." + echo "Slack message sent successfully." \ No newline at end of file diff --git a/.github/workflows/test-github-auth.yml b/.github/workflows/test-github-auth.yml new file mode 100644 index 0000000..5a4d15f --- /dev/null +++ b/.github/workflows/test-github-auth.yml @@ -0,0 +1,35 @@ +name: Test GitHub Auth + +on: + push: + branches: + - "**" + +jobs: + test-github-auth: + runs-on: ubuntu-latest + environment: test + container: + image: ghcr.io/muselab-d2x/d2x:cumulusci-next-snapshots + options: --user root + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github-token }} + env: + DEV_HUB_AUTH_URL: "${{ secrets.dev-hub-auth-url }}" + CUMULUSCI_SERVICE_github: '{ "username": "${{ github.actor }}", "token": "${{ secrets.github-token }}", "email": "${{ secrets.gh-email }}" }' + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Auth to DevHub + run: /usr/local/bin/devhub.sh + - name: Test GitHub Auth + run: | + d2x auth url + d2x auth login + shell: bash + - name: Record API Requests + run: | + pip install vcrpy + vcrpy --record-mode=once --filter-headers Authorization --filter-headers X-Auth-Token --filter-headers X-API-Key + shell: bash diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..514b3df --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,54 @@ +name: Run Tests +on: + push: + branches: + - "**" +jobs: + test: + runs-on: ubuntu-latest + steps: + 
- name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: 1.7.1 + virtualenvs-create: true + virtualenvs-in-project: true + + - name: Cache dependencies + uses: actions/cache@v4 + with: + path: | + .venv + ~/.cache/pypoetry + key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + ${{ runner.os }}-poetry- + + - name: Install dependencies + run: | + poetry install --with dev + + - name: Run tests + run: | + poetry run pytest tests/ --cov=d2x --cov-report=xml --junitxml=tests/results.xml + echo "✅ Tests and coverage completed" + + - name: Upload coverage report + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: coverage.xml + + - name: Upload test results + uses: actions/upload-artifact@v4 + with: + name: test-results + path: tests/results.xml diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..e69c626 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,25 @@ +// .vscode/launch.json +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Debug Active File", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": false + }, + { + "name": "Python: Debug Tests", + "type": "python", + "request": "launch", + "module": "pytest", + "args": [ + "tests" + ], + "console": "integratedTerminal", + "justMyCode": false + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..9b38853 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} \ No newline at end of file diff --git a/d2x/api.py b/d2x/api.py new file mode 100644 index 0000000..baa3478 
--- /dev/null +++ b/d2x/api.py @@ -0,0 +1 @@ +"""d2x.api""" diff --git a/d2x/api/__init__.py b/d2x/api/__init__.py index baa3478..3551b5d 100644 --- a/d2x/api/__init__.py +++ b/d2x/api/__init__.py @@ -1 +1 @@ -"""d2x.api""" +# d2x.api module diff --git a/d2x/api/gh.py b/d2x/api/gh.py new file mode 100644 index 0000000..3e2813e --- /dev/null +++ b/d2x/api/gh.py @@ -0,0 +1,91 @@ +import os +import requests + +GITHUB_REPO = os.environ.get("GITHUB_REPOSITORY") + + +def get_github_token() -> str: + """Get the GitHub token from the environment""" + token = os.environ.get("GITHUB_TOKEN") + if not token: + raise ValueError("GITHUB_TOKEN environment variable not set") + return token + + +def get_repo_full_name() -> str: + """Get the full name of the GitHub repository""" + repo = os.environ.get("GITHUB_REPOSITORY") + if not repo: + raise ValueError("GITHUB_REPOSITORY environment variable not set") + return repo + + +def set_environment_variable(env_name: str, var_name: str, var_value: str) -> None: + """Set a variable in a GitHub Environment""" + token = os.environ.get("GITHUB_TOKEN") + repo = os.environ.get("GITHUB_REPOSITORY") + if not token: + raise ValueError("GITHUB_TOKEN environment variable not set") + + url = f"https://api.github.com/repos/{GITHUB_REPO}/environments/{env_name}/variables/{var_name}" + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github.v3+json", + } + data = {"name": var_name, "value": var_value} + + response = requests.put(url, headers=headers, json=data) + response.raise_for_status() + + +def get_environment_variable(env_name: str, var_name: str) -> str: + """Get a variable from a GitHub Environment""" + token = os.environ.get("GITHUB_TOKEN") + if not token: + raise ValueError("GITHUB_TOKEN environment variable not set") + + url = f"https://api.github.com/repos/{GITHUB_REPO}/environments/{env_name}/variables/{var_name}" + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github.v3+json", 
+ } + + response = requests.get(url, headers=headers) + response.raise_for_status() + + return response.json()["value"] + + +def set_environment_secret(env_name: str, secret_name: str, secret_value: str) -> None: + """Set a secret in a GitHub Environment""" + token = os.environ.get("GITHUB_TOKEN") + if not token: + raise ValueError("GITHUB_TOKEN environment variable not set") + + url = f"https://api.github.com/repos/{GITHUB_REPO}/environments/{env_name}/secrets/{secret_name}" + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github.v3+json", + } + data = {"encrypted_value": secret_value} + + response = requests.put(url, headers=headers, json=data) + response.raise_for_status() + + +def get_environment_secret(env_name: str, secret_name: str) -> str: + """Get a secret from a GitHub Environment""" + token = os.environ.get("GITHUB_TOKEN") + if not token: + raise ValueError("GITHUB_TOKEN environment variable not set") + + url = f"https://api.github.com/repos/{GITHUB_REPO}/environments/{env_name}/secrets/{secret_name}" + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github.v3+json", + } + + response = requests.get(url, headers=headers) + response.raise_for_status() + + return response.json()["encrypted_value"] diff --git a/d2x/auth/sf/__init__.py b/d2x/auth/sf/__init__.py index de6764a..9ac9c98 100644 --- a/d2x/auth/sf/__init__.py +++ b/d2x/auth/sf/__init__.py @@ -1 +1 @@ -"""d2x.auth.sf""" +# This is the __init__.py file for the d2x.auth.sf module. 
diff --git a/d2x/auth/sf/auth_url.py b/d2x/auth/sf/auth_url.py new file mode 100644 index 0000000..9013460 --- /dev/null +++ b/d2x/auth/sf/auth_url.py @@ -0,0 +1,247 @@ +# Standard library imports +import http.client +import json +import os +import sys +import urllib.parse +from datetime import datetime, timedelta + +# Third party imports +from rich import box +from rich.console import Console +from rich.panel import Panel +from rich.progress import Progress, SpinnerColumn, TextColumn +from rich.table import Table + +# Local imports +from d2x.models.sf.auth import ( + DomainType, + TokenRequest, + TokenResponse, + HttpResponse, + TokenExchangeDebug, + SfdxAuthUrlModel, +) +from d2x.ux.gh.actions import summary as gha_summary, output as gha_output +from d2x.models.sf.org import SalesforceOrgInfo +from d2x.base.types import CLIOptions +from d2x.api.gh import ( + set_environment_variable, + get_environment_variable, +) # Ensure get_environment_variable is imported + + +def exchange_token(org_info: SalesforceOrgInfo, cli_options: CLIOptions): + """Exchange refresh token for access token with detailed error handling""" + console = cli_options.console + debug_info = None # Initialize debug_info before the try block + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + console=console, + transient=True, + ) as progress: + try: + progress.add_task("Preparing token request...", total=None) + + # Create token request using auth_info + token_request = TokenRequest( + client_id=org_info.auth_info.client_id, + client_secret=( + org_info.auth_info.client_secret.get_secret_value() + if org_info.auth_info.client_secret + else None + ), + refresh_token=org_info.auth_info.refresh_token, + ) + + # Prepare the request + token_url_path = "/services/oauth2/token" + headers = {"Content-Type": "application/x-www-form-urlencoded"} + body = token_request.to_form() + + # Create debug info + debug_info = TokenExchangeDebug( + 
url=f"https://{org_info.full_domain}{token_url_path}", + method="POST", + headers=headers, + request=token_request, + ) + + console.print(debug_info.to_table()) + + # Make request + progress.add_task(f"Connecting to {org_info.full_domain}...", total=None) + conn = http.client.HTTPSConnection(org_info.full_domain) + + task = progress.add_task("Exchanging tokens...", total=None) + conn.request("POST", token_url_path, body, headers) + response = conn.getresponse() + response_data = response.read() + + # Create response object + http_response = HttpResponse( + status=response.status, + reason=response.reason, + headers=dict(response.getheaders()), + body=response_data.decode("utf-8"), + ) + + try: + http_response.parsed_body = json.loads(http_response.body) + except json.JSONDecodeError: + pass + + debug_info.response = http_response + + if response.status != 200: + error_panel = Panel( + f"[red]HTTP Status: {http_response.status} {http_response.reason}\n\n" + f"[yellow]Response Headers:[/]\n" + f"{http_response.headers}\n\n" + f"[yellow]Response Body:[/]\n" + f"{http_response.body}", + title="[red]Token Exchange Failed", + border_style="red", + ) + console.print(error_panel) + raise RuntimeError( + f"Token exchange failed: {response.status} {response.reason}" + ) + + progress.update(task, description="Token exchange successful!") + + # Parse token response + token_response = TokenResponse.model_validate(http_response.parsed_body) + + # Display success + success_panel = Panel( + f"[green]Successfully authenticated to {org_info.full_domain}\n" + f"[blue]Token Details:[/]\n" + f" Issued At: {token_response.issued_at.strftime('%Y-%m-%d %H:%M:%S')}\n" + f" Expires At: {token_response.expires_at.strftime('%Y-%m-%d %H:%M:%S')}\n" + f" ({token_response.expires_in} seconds)\n" + f"[cyan]Instance URL: {token_response.instance_url}", + title="[green]Authentication Success", + border_style="green", + ) + console.print(success_panel) + + # Store access token in GitHub Environment 
+ set_environment_variable( + "salesforce", + "ACCESS_TOKEN", + token_response.access_token.get_secret_value(), + ) + + return token_response + + except Exception as e: + if debug_info is not None: + debug_info.error = str(e) + error_panel = Panel( + f"[red]Error: {str(e)}", + title="[red]Authentication Failed", + border_style="red", + ) + console.print(error_panel) + raise + + +def get_full_domain(org_info: SalesforceOrgInfo) -> str: + """Construct the full domain from SalesforceOrgInfo.""" + return org_info.full_domain.rstrip("/") + + +def main(cli_options: CLIOptions): + """Main CLI entrypoint""" + console = cli_options.console + + try: + # Get auth URL from environment or args + auth_url = os.environ.get("SFDX_AUTH_URL") or sys.argv[1] + + # Remove the console.status context manager + # with console.status("[bold blue]Parsing SFDX Auth URL..."): + # org_info = parse_sfdx_auth_url(auth_url) + org_info = SfdxAuthUrlModel(auth_url=auth_url).parse_sfdx_auth_url() + + table = Table(title="Salesforce Org Information", box=box.ROUNDED) + table.add_column("Property", style="cyan") + table.add_column("Value", style="green") + + table.add_row("Org Type", org_info["org_type"]) + table.add_row("Domain Type", org_info["domain_type"]) + table.add_row("Full Domain", org_info["full_domain"]) + + if org_info["domain_type"] == DomainType.POD: + table.add_row("Region", org_info["region"] or "Classic") + table.add_row("Pod Number", org_info["pod_number"] or "N/A") + table.add_row("Pod Type", org_info["pod_type"] or "Standard") + table.add_row("Is Classic Pod", "✓" if org_info["is_classic_pod"] else "✗") + table.add_row("Is Hyperforce", "✓" if org_info["is_hyperforce"] else "✗") + else: + table.add_row("MyDomain", org_info["mydomain"] or "N/A") + table.add_row("Sandbox Name", org_info["sandbox_name"] or "N/A") + table.add_row("Is Sandbox", "✓" if org_info["is_sandbox"] else "✗") + + console.print(table) + + # Exchange token + token_response = exchange_token(org_info, cli_options) + 
+ # Create step summary + summary_md = f""" +## Salesforce Authentication Results + +### Organization Details +- **Domain**: {org_info["full_domain"]} +- **Type**: {org_info["org_type"]} +{"- **Region**: " + (org_info["region"] or "Classic") if org_info["domain_type"] == DomainType.POD else ""} +{"- **Hyperforce**: " + ("Yes" if org_info["is_hyperforce"] else "No") if org_info["domain_type"] == DomainType.POD else ""} + +### Authentication Status +- **Status**: ✅ Success +- **Timestamp**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} +- **Token Expiry**: {token_response.expires_in} seconds + """ + gha_summary(summary_md) + + # Set action outputs + gha_output("access_token", token_response.access_token) + gha_output("instance_url", token_response.instance_url) + gha_output("org_type", org_info["org_type"]) + if org_info["domain_type"] == DomainType.POD: + gha_output("region", org_info["region"] or "classic") + gha_output("is_hyperforce", str(org_info["is_hyperforce"]).lower()) + + sys.exit(0) + + except Exception as e: + # Create error panel + error_panel = Panel( + f"[red]Error: {str(e)}", + title="[red]Authentication Failed", + border_style="red", + ) + console.print(error_panel) + + # Add error to job summary + error_summary = f""" +## ❌ Authentication Failed + +**Error**: {str(e)} +**Timestamp**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} + """ + gha_summary(error_summary) + + sys.exit(1) + + +if __name__ == "__main__": + import sys + from d2x.base.types import CLIOptions + + # Assuming CLIOptions is instantiated before calling main + # This part is handled in cli.py + pass diff --git a/d2x/auth/sf/auth_url/__init__.py b/d2x/auth/sf/auth_url/__init__.py deleted file mode 100644 index e7cc3cd..0000000 --- a/d2x/auth/sf/auth_url/__init__.py +++ /dev/null @@ -1,353 +0,0 @@ -# Standard library imports -import http.client -import json -import os -import sys -import urllib.parse -from datetime import datetime, timedelta -from typing import Optional, Literal - -# 
Third party imports -from pydantic import BaseModel, Field, SecretStr, computed_field -from rich import box -from rich.console import Console -from rich.panel import Panel -from rich.progress import Progress, SpinnerColumn, TextColumn -from rich.table import Table - -# Local imports -from d2x.parse.sf.auth_url import parse_sfdx_auth_url, SalesforceOrgInfo -from d2x.ux.gh.actions import summary as gha_summary, output as gha_output - - -# Type definitions -OrgType = Literal["production", "sandbox", "scratch", "developer", "demo"] -DomainType = Literal["my", "lightning", "pod"] - - -class TokenRequest(BaseModel): - """OAuth token request parameters for Salesforce authentication""" - - grant_type: str = Field( - default="refresh_token", - description="OAuth grant type, always 'refresh_token' for this flow", - ) - client_id: str = Field( - description="The connected app's client ID/consumer key", - examples=["PlatformCLI", "3MVG9..."], - ) - client_secret: Optional[SecretStr] = Field( - default=None, - description="The connected app's client secret/consumer secret if required", - ) - refresh_token: SecretStr = Field( - description="The SFDX refresh token obtained from auth URL" - ) - - def to_form(self) -> str: - """Convert to URL encoded form data, only including client_secret if provided""" - data = { - "grant_type": self.grant_type, - "client_id": self.client_id, - "refresh_token": self.refresh_token.get_secret_value(), - } - # Only include client_secret if it's provided - if self.client_secret: - data["client_secret"] = self.client_secret.get_secret_value() - - return urllib.parse.urlencode(data) - - -class TokenResponse(BaseModel): - """Salesforce OAuth token response""" - - access_token: SecretStr = Field(description="The OAuth access token for API calls") - instance_url: str = Field( - description="The Salesforce instance URL for API calls", - examples=["https://mycompany.my.salesforce.com"], - ) - issued_at: datetime = Field( - default_factory=datetime.now, 
description="Timestamp when the token was issued" - ) - expires_in: int = Field( - default=7200, description="Token lifetime in seconds", ge=0, examples=[7200] - ) - token_type: str = Field( - default="Bearer", - description="OAuth token type, typically 'Bearer'", - pattern="^Bearer$", - ) - scope: Optional[str] = Field( - default=None, description="OAuth scopes granted to the token" - ) - signature: Optional[str] = Field( - default=None, description="Request signature for verification" - ) - id_token: Optional[SecretStr] = Field( - default=None, description="OpenID Connect ID token if requested" - ) - - @computed_field - def expires_at(self) -> datetime: - """Calculate token expiration time""" - return self.issued_at.replace(microsecond=0) + timedelta( - seconds=self.expires_in - ) - - def model_dump_safe(self) -> dict: - """Dump model while masking sensitive fields""" - data = self.model_dump() - data["access_token"] = "**********" + self.access_token.get_secret_value()[-4:] - if self.id_token: - data["id_token"] = "*" * 10 - return data - - -class HttpResponse(BaseModel): - """HTTP response details""" - - status: int = Field(description="HTTP status code", ge=100, le=599) - reason: str = Field(description="HTTP status reason phrase") - headers: dict[str, str] = Field(description="HTTP response headers") - body: str = Field(description="Raw response body") - parsed_body: Optional[dict] = Field( - default=None, description="Parsed JSON response body if available" - ) - - -class TokenExchangeDebug(BaseModel): - """Debug information for token exchange""" - - url: str = Field( - description="Full URL for token exchange request", - examples=["https://login.salesforce.com/services/oauth2/token"], - ) - method: str = Field(description="HTTP method used", pattern="^POST$") - headers: dict[str, str] = Field(description="HTTP request headers") - request: TokenRequest = Field(description="Token request parameters") - response: Optional[HttpResponse] = Field( - default=None, 
description="Response information when available" - ) - error: Optional[str] = Field( - default=None, description="Error message if exchange failed" - ) - - def to_table(self) -> Table: - """Convert debug info to rich table""" - table = Table(title="Token Exchange Details", box=box.ROUNDED) - table.add_column("Property", style="cyan") - table.add_column("Value", style="yellow") - - table.add_row("URL", self.url) - table.add_row("Method", self.method) - for header, value in self.headers.items(): - table.add_row(f"Header: {header}", value) - table.add_row("Client ID", self.request.client_id) - table.add_row( - "Client Secret", - ( - "*" * len(self.request.client_secret.get_secret_value()) - if self.request.client_secret - else "Not provided" - ), - ) - table.add_row( - "Refresh Token", - "*" * 10 + self.request.refresh_token.get_secret_value()[-4:], - ) - - return table - - -def exchange_token(org_info: SalesforceOrgInfo, console: Console) -> TokenResponse: - """Exchange refresh token for access token with detailed error handling""" - with Progress( - SpinnerColumn(), - TextColumn("[progress.description]{task.description}"), - console=console, - transient=True, - ) as progress: - try: - progress.add_task("Preparing token request...", total=None) - - # Create token request - only include client_secret if provided in URL - token_request = TokenRequest( - client_id=org_info.client_id, - client_secret=( - SecretStr(org_info.client_secret) - if org_info.client_secret - else None - ), - refresh_token=SecretStr(org_info.refresh_token), - ) - - # Prepare the request - token_url_path = "/services/oauth2/token" - headers = {"Content-Type": "application/x-www-form-urlencoded"} - body = token_request.to_form() - - # Create debug info - debug = TokenExchangeDebug( - url=f"https://{org_info.full_domain}{token_url_path}", - method="POST", - headers=headers, - request=token_request, - ) - - console.print(debug.to_table()) - - # Make request - progress.add_task(f"Connecting to 
{org_info.full_domain}...", total=None) - conn = http.client.HTTPSConnection(org_info.full_domain) - - task = progress.add_task("Exchanging tokens...", total=None) - conn.request("POST", token_url_path, body, headers) - response = conn.getresponse() - response_data = response.read() - - # Create response object - http_response = HttpResponse( - status=response.status, - reason=response.reason, - headers=dict(response.getheaders()), - body=response_data.decode("utf-8"), - ) - - try: - http_response.parsed_body = json.loads(http_response.body) - except json.JSONDecodeError: - pass - - debug.response = http_response - - if response.status != 200: - error_panel = Panel( - f"[red]HTTP Status: {http_response.status} {http_response.reason}\n\n" - f"[yellow]Response Headers:[/]\n" - f"{http_response.headers}\n\n" - f"[yellow]Response Body:[/]\n" - f"{http_response.body}", - title="[red]Token Exchange Failed", - border_style="red", - ) - console.print(error_panel) - raise RuntimeError( - f"Token exchange failed: {response.status} {response.reason}" - ) - - progress.update(task, description="Token exchange successful!") - - # Parse token response - token_response = TokenResponse.model_validate(http_response.parsed_body) - - # Display success - success_panel = Panel( - f"[green]Successfully authenticated to {org_info.full_domain}\n" - f"[blue]Token Details:[/]\n" - f" Issued At: {token_response.issued_at.strftime('%Y-%m-%d %H:%M:%S')}\n" - f" Expires At: {token_response.expires_at.strftime('%Y-%m-%d %H:%M:%S')}\n" - f" ({token_response.expires_in} seconds)\n" - f"[cyan]Instance URL: {token_response.instance_url}", - title="[green]Authentication Success", - border_style="green", - ) - console.print(success_panel) - - return token_response - - except Exception as e: - debug.error = str(e) - error_panel = Panel( - f"[red]Error: {str(e)}", - title="[red]Authentication Failed", - border_style="red", - ) - console.print(error_panel) - raise - - -def main(): - console = 
Console(record=True) - - try: - # Get auth URL from environment or args - auth_url = os.environ.get("SFDX_AUTH_URL") or sys.argv[1] - - # Parse URL and display org info - with console.status("[bold blue]Parsing SFDX Auth URL..."): - org_info = parse_sfdx_auth_url(auth_url) - - table = Table(title="Salesforce Org Information", box=box.ROUNDED) - table.add_column("Property", style="cyan") - table.add_column("Value", style="green") - - table.add_row("Org Type", org_info.org_type) - table.add_row("Domain Type", org_info.domain_type) - table.add_row("Full Domain", org_info.full_domain) - - if org_info.domain_type == "pod": - table.add_row("Region", org_info.region or "Classic") - table.add_row("Pod Number", org_info.pod_number or "N/A") - table.add_row("Pod Type", org_info.pod_type or "Standard") - table.add_row("Is Classic Pod", "✓" if org_info.is_classic_pod else "✗") - table.add_row("Is Hyperforce", "✓" if org_info.is_hyperforce else "✗") - else: - table.add_row("MyDomain", org_info.mydomain or "N/A") - table.add_row("Sandbox Name", org_info.sandbox_name or "N/A") - table.add_row("Is Sandbox", "✓" if org_info.is_sandbox else "✗") - - console.print(table) - - # Exchange token - token_response = exchange_token(org_info, console) - - # Create step summary - summary_md = f""" -## Salesforce Authentication Results - -### Organization Details -- **Domain**: {org_info.full_domain} -- **Type**: {org_info.org_type} -{"- **Region**: " + (org_info.region or "Classic") if org_info.domain_type == 'pod' else ""} -{"- **Hyperforce**: " + ("Yes" if org_info.is_hyperforce else "No") if org_info.domain_type == 'pod' else ""} - -### Authentication Status -- **Status**: ✅ Success -- **Timestamp**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} -- **Token Expiry**: {token_response.expires_in} seconds - """ - gha_summary(summary_md) - - # Set action outputs - gha_output("access_token", token_response.access_token.get_secret_value()) - gha_output("instance_url", 
token_response.instance_url) - gha_output("org_type", org_info.org_type) - if org_info.domain_type == "pod": - gha_output("region", org_info.region or "classic") - gha_output("is_hyperforce", str(org_info.is_hyperforce).lower()) - - sys.exit(0) - - except Exception as e: - # Create error panel - error_panel = Panel( - f"[red]Error: {str(e)}", - title="[red]Authentication Failed", - border_style="red", - ) - console.print(error_panel) - - # Add error to job summary - error_summary = f""" -## ❌ Authentication Failed - -**Error**: {str(e)} -**Timestamp**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - """ - gha_summary(error_summary) - - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/d2x/auth/sf/login_url.py b/d2x/auth/sf/login_url.py new file mode 100644 index 0000000..b32d91b --- /dev/null +++ b/d2x/auth/sf/login_url.py @@ -0,0 +1,79 @@ +import sys +import os +from rich.console import Console +from d2x.models.sf.auth import LoginUrlModel, SfdxAuthUrlModel +from d2x.ux.gh.actions import summary, output +from d2x.base.types import CLIOptions +from typing import Optional +from d2x.api.gh import get_environment_variable # Add get_environment_variable import + + +def generate_login_url(instance_url: str, access_token: str) -> str: + """Generate the login URL using the instance URL and access token.""" + login_url, _ = LoginUrlModel( + access_token=access_token, login_url=instance_url + ).get_login_url_and_token() + return login_url + + +def main(cli_options: CLIOptions): + """Main CLI entrypoint""" + console = cli_options.console + + auth_url = os.environ.get("SFDX_AUTH_URL") + + if not auth_url: + raise ValueError( + "Salesforce Auth Url not found. Set the SFDX_AUTH_URL environment variable." 
+ ) + + org_info = SfdxAuthUrlModel(auth_url=auth_url).parse_sfdx_auth_url() + + from d2x.auth.sf.auth_url import exchange_token + + try: + access_token = get_environment_variable("salesforce", "ACCESS_TOKEN") + except Exception as e: + console.print(f"[red]Error retrieving access token: {e}") + sys.exit(1) + + start_url = generate_login_url( + instance_url=org_info.auth_info.instance_url, + access_token=access_token, + ) + + output("access_token", access_token) # Use access_token directly + output("instance_url", org_info.auth_info.instance_url) + + output("start_url", start_url) + output("org_type", org_info["org_type"]) + + if org_info["domain_type"] == "pod": + output("region", org_info["region"] or "classic") + output("is_hyperforce", str(org_info["is_hyperforce"]).lower()) + + from d2x.auth.sf.auth_url import get_full_domain + + summary_md = f""" +## Salesforce Authentication Successful 🚀 + +### Organization Details +- **Domain**: {get_full_domain(org_info)} +- **Type**: {org_info["org_type"]} +{"- **Region**: " + (org_info["region"] or "Classic") if org_info["domain_type"] == 'pod' else ""} +{"- **Hyperforce**: " + ("Yes" if org_info["is_hyperforce"] else "No") if org_info["domain_type"] == 'pod' else ""} + +### Authentication Status +- **Status**: ✅ Success +- **Timestamp**: n/a (access token read from environment) +- **Token Expiry**: n/a +- **Instance URL**: {org_info.auth_info.instance_url} +""" + summary(summary_md) + + console.print("\n[green]✓ Successfully authenticated to Salesforce!") + console.print(f"\n[yellow]Login URL:[/]\n{start_url}") + + +if __name__ == "__main__": + main(CLIOptions()) diff --git a/d2x/base/__init__.py b/d2x/base/__init__.py new file mode 100644 index 0000000..bbd06fc --- /dev/null +++ b/d2x/base/__init__.py @@ -0,0 +1 @@ +# d2x.base diff --git a/d2x/base/models.py b/d2x/base/models.py new file mode 100644 index 0000000..4f641d0 --- /dev/null +++ b/d2x/base/models.py @@ -0,0 +1,57 @@ +from 
pydantic import BaseModel, Field +from rich.table import Table +from typing import Optional +from datetime import datetime, timedelta + + +class CommonBaseModel(BaseModel): + """Common base class for all models""" + + class Config: + from_attributes = True + populate_by_name = True + use_enum_values = True + + def to_dict(self): + """Convert model to dictionary""" + return self.dict(by_alias=True) + + def to_json(self): + """Convert model to JSON string""" + return self.json(by_alias=True) + + def to_yaml(self): + """Convert model to YAML string""" + try: + import yaml + except ImportError: + raise ImportError( + "PyYAML is not installed. Please install it to use this method." + ) + return yaml.dump(self.dict(by_alias=True)) + + @classmethod + def from_yaml(cls, yaml_str: str): + """Create model instance from YAML string""" + try: + import yaml + except ImportError: + raise ImportError( + "PyYAML is not installed. Please install it to use this method." + ) + data = yaml.safe_load(yaml_str) + return cls(**data) + + @classmethod + def from_dict(cls, data: dict): + """Create model instance from dictionary""" + return cls(**data) + + @classmethod + def from_json(cls, json_str: str): + """Create model instance from JSON string""" + return cls.parse_raw(json_str) + + def to_openapi_schema(self): + """Convert model to OpenAPI 3.1 schema""" + return self.schema_json(by_alias=True) diff --git a/d2x/base/types.py b/d2x/base/types.py new file mode 100644 index 0000000..84d94a6 --- /dev/null +++ b/d2x/base/types.py @@ -0,0 +1,36 @@ +from enum import Enum +from typing import Literal +from pydantic import BaseModel, Field +from rich.console import Console + + +class OutputFormat(str, Enum): + JSON = "json" + YAML = "yaml" + TEXT = "text" + MARKDOWN = "markdown" + + +# Redefine DebugModeType as bool +OutputFormatType = OutputFormat +DebugModeType = bool + + +class CLIOptions(BaseModel): + """Model to encapsulate CLI options.""" + + output_format: OutputFormatType = Field( + 
default=OutputFormat.TEXT, description="Output format for CLI commands." + ) + debug: DebugModeType = Field( + default=False, description="Enable or disable debug mode." + ) + console: Console = Field( + default_factory=Console, description="Rich Console for output." + ) + + class Config: + arbitrary_types_allowed = True + + +# Add other enums and types as needed diff --git a/d2x/cli/__init__.py b/d2x/cli/__init__.py new file mode 100644 index 0000000..9978d03 --- /dev/null +++ b/d2x/cli/__init__.py @@ -0,0 +1 @@ +# d2x.cli diff --git a/d2x/cli/main.py b/d2x/cli/main.py new file mode 100644 index 0000000..2e34dae --- /dev/null +++ b/d2x/cli/main.py @@ -0,0 +1,167 @@ +# cli.py +import rich_click as click +from d2x.models.sf.auth import LoginUrlModel, SfdxAuthUrlModel +import sys +import pdb +from d2x.base.types import OutputFormat, OutputFormatType, CLIOptions +from typing import Optional +from importlib.metadata import version, PackageNotFoundError +from d2x.env.gh import set_environment_variable, get_environment_variable, set_environment_secret, get_environment_secret + +# Disable rich_click's syntax highlighting +click.SHOW_ARGUMENTS = False +click.SHOW_METAVARS_COLUMN = False +click.SHOW_OPTIONS = False + +try: + VERSION = version("d2x") +except PackageNotFoundError: + VERSION = "dev" + + +def common_options(func): + """Decorator to add common options to all commands.""" + func = click.option( + "--output-format", + type=click.Choice([format.value for format in OutputFormat]), + default=OutputFormat.TEXT.value, + help="Output format.", + )(func) + func = click.option("--debug", is_flag=True, help="Enable debug mode.")(func) + return func + + +@click.group(name="d2x") +@click.version_option(version=VERSION, prog_name="d2x") +def d2x_cli(): + """D2X CLI main command group""" + pass + + +@d2x_cli.group() +def sf(): + """Salesforce commands""" + pass + + +@sf.group() +def auth(): + """Salesforce authentication commands""" + pass + + +@auth.command() +@common_options 
+def login(output_format: OutputFormatType, debug: bool): + """Exchange Salesforce refresh token for a current login session start url.""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + login_url_main(cli_options) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +@auth.command() +@common_options +def url(output_format: OutputFormatType, debug: bool): + """Exchange SFDX_AUTH_URL for a Salesfoce access token session""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + auth_url_main(cli_options) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +@d2x_cli.group() +def env(): + """Environment commands""" + pass + + +@env.command() +@click.argument("env_name") +@click.argument("var_name") +@click.argument("var_value") +@common_options +def set_var(env_name: str, var_name: str, var_value: str, output_format: OutputFormatType, debug: bool): + """Set an environment variable""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + set_environment_variable(env_name, var_name, var_value) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +@env.command() +@click.argument("env_name") +@click.argument("var_name") +@common_options +def get_var(env_name: str, var_name: str, output_format: OutputFormatType, debug: bool): + """Get an environment variable""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + value = get_environment_variable(env_name, var_name) + click.echo(value) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +@env.command() +@click.argument("env_name") +@click.argument("secret_name") +@click.argument("secret_value") +@common_options +def set_secret(env_name: str, secret_name: str, secret_value: str, output_format: OutputFormatType, debug: bool): + """Set an 
environment secret""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + set_environment_secret(env_name, secret_name, secret_value) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +@env.command() +@click.argument("env_name") +@click.argument("secret_name") +@common_options +def get_secret(env_name: str, secret_name: str, output_format: OutputFormatType, debug: bool): + """Get an environment secret""" + cli_options = CLIOptions(output_format=output_format, debug=debug) + try: + value = get_environment_secret(env_name, secret_name) + click.echo(value) + except: + if debug: + type, value, tb = sys.exc_info() + pdb.post_mortem(tb) + else: + raise + + +def get_cli(): + """Get the CLI entry point""" + return d2x_cli + + +if __name__ == "__main__": + d2x_cli() diff --git a/d2x/env/__init__.py b/d2x/env/__init__.py new file mode 100644 index 0000000..5baebcd --- /dev/null +++ b/d2x/env/__init__.py @@ -0,0 +1 @@ +# This is an empty __init__.py file for the d2x.env module diff --git a/d2x/env/gh.py b/d2x/env/gh.py new file mode 100644 index 0000000..b362ea5 --- /dev/null +++ b/d2x/env/gh.py @@ -0,0 +1,7 @@ +import os +from d2x.api.gh import ( + set_environment_variable, + get_environment_variable, + set_environment_secret, + get_environment_secret, +) diff --git a/d2x/gen/sf/__init__.py b/d2x/gen/sf/__init__.py index e69de29..e507010 100644 --- a/d2x/gen/sf/__init__.py +++ b/d2x/gen/sf/__init__.py @@ -0,0 +1 @@ +"""d2x.gen.sf""" diff --git a/d2x/gen/sf/login_url/__init__.py b/d2x/gen/sf/login_url/__init__.py deleted file mode 100644 index 848f482..0000000 --- a/d2x/gen/sf/login_url/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys -import os -from rich.console import Console -from d2x.auth.sf_auth_url import exchange_token, parse_sfdx_auth_url -from d2x.auth.sf_login_url import generate_login_url -from d2x.gh.actions.ux import summary, output - - -def main(): - """Main CLI entrypoint""" - 
console = Console() - - if len(sys.argv) < 2: - console.print("[red]Error: No authentication URL provided") - console.print("Usage: d2x auth login --url ") - sys.exit(1) - - try: - # Get auth URL from args or env - auth_url = None - if "--url" in sys.argv: - url_index = sys.argv.index("--url") + 1 - if url_index < len(sys.argv): - auth_url = sys.argv[url_index] - - if not auth_url: - auth_url = os.environ.get("SFDX_AUTH_URL") - - if not auth_url: - raise ValueError( - "No authentication URL provided via --url or SFDX_AUTH_URL" - ) - - # Execute the login flow - with console.status("[bold blue]Authenticating to Salesforce..."): - # Parse and validate the auth URL - org_info = parse_sfdx_auth_url(auth_url) - - # Exchange tokens - token_response = exchange_token(org_info, console) - - # Generate login URL - start_url = generate_login_url( - instance_url=token_response.instance_url, - access_token=token_response.access_token.get_secret_value(), - ) - - # Set outputs for GitHub Actions - output.add("access_token", token_response.access_token.get_secret_value()) - output.add("instance_url", token_response.instance_url) - output.add("start_url", start_url) - output.add("org_type", org_info.org_type) - - if org_info.domain_type == "pod": - output.add("region", org_info.region or "classic") - output.add("is_hyperforce", str(org_info.is_hyperforce).lower()) - - # Add summary for GitHub Actions - summary_md = f""" -## Salesforce Authentication Successful 🚀 - -### Organization Details -- **Domain**: {org_info.full_domain} -- **Type**: {org_info.org_type} -{"- **Region**: " + (org_info.region or "Classic") if org_info.domain_type == 'pod' else ""} -{"- **Hyperforce**: " + ("Yes" if org_info.is_hyperforce else "No") if org_info.domain_type == 'pod' else ""} - -### Authentication Status -- **Status**: ✅ Success -- **Timestamp**: {token_response.issued_at.strftime('%Y-%m-%d %H:%M:%S')} -- **Token Expiry**: {token_response.expires_in} seconds -- **Instance URL**: 
{token_response.instance_url} - -### Quick Access -``` -{start_url} -``` -""" - summary.add(summary_md) - - # Success output - console.print("\n[green]✓ Successfully authenticated to Salesforce!") - console.print(f"\n[yellow]Login URL:[/]\n{start_url}") - - except Exception as e: - console.print(f"[red]Error: {str(e)}") - error_md = f""" -## ❌ Authentication Failed - -**Error**: {str(e)} -""" - summary.add(error_md) - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/d2x/models/__init__.py b/d2x/models/__init__.py new file mode 100644 index 0000000..9b2e7a1 --- /dev/null +++ b/d2x/models/__init__.py @@ -0,0 +1,3 @@ +# d2x.models + +# This is an empty __init__.py file for the d2x.models module diff --git a/d2x/models/sf/__init__.py b/d2x/models/sf/__init__.py new file mode 100644 index 0000000..c1fb9b4 --- /dev/null +++ b/d2x/models/sf/__init__.py @@ -0,0 +1,3 @@ +# d2x.models.sf + +# This is an empty __init__.py file for the d2x.models.sf module diff --git a/d2x/models/sf/auth.py b/d2x/models/sf/auth.py new file mode 100644 index 0000000..f3a2e16 --- /dev/null +++ b/d2x/models/sf/auth.py @@ -0,0 +1,253 @@ +# auth.py +import re +import urllib.parse +from datetime import datetime, timedelta +from enum import Enum +from typing import Optional +from pydantic import BaseModel, Field, SecretStr, computed_field +from rich.table import Table +from rich import box +from d2x.base.models import CommonBaseModel + +# Remove OutputFormatType import if not used +# from d2x.base.types import OutputFormatType + + +class OrgType(str, Enum): + PRODUCTION = "production" + SANDBOX = "sandbox" + SCRATCH = "scratch" + DEVELOPER = "developer" + DEMO = "demo" # Assigned value to DEMO + + +class DomainType(str, Enum): + POD = "pod" + LIGHTNING = "lightning" + MY = "my" + + +class AuthInfo(CommonBaseModel): + """Authentication components for Salesforce org.""" + + client_id: str + client_secret: SecretStr # Changed from str to SecretStr + refresh_token: str + instance_url: 
str + + +class TokenRequest(BaseModel): + """OAuth token request parameters for Salesforce authentication""" + + grant_type: str = Field( + default="refresh_token", + description="OAuth grant type, always 'refresh_token' for this flow", + pattern="^refresh_token$", # Changed from regex to pattern + ) + client_id: str = Field( + description="The connected app's client ID/consumer key", + examples=["PlatformCLI", "3MVG9..."], + ) + client_secret: Optional[SecretStr] = Field( + default=None, + description="The connected app's client secret/consumer secret if required", + ) + refresh_token: SecretStr = Field( + description="The SFDX refresh token obtained from auth URL" + ) + + def to_form(self) -> str: + """Convert to URL encoded form data, only including client_secret if provided""" + data = { + "grant_type": self.grant_type, + "client_id": self.client_id, + "refresh_token": self.refresh_token.get_secret_value(), + } + # Only include client_secret if it's provided + if self.client_secret: + data["client_secret"] = self.client_secret.get_secret_value() + + return urllib.parse.urlencode(data) + + +class TokenResponse(BaseModel): + """Salesforce OAuth token response""" + + access_token: SecretStr = Field(description="The OAuth access token for API calls") + instance_url: str = Field( + description="The Salesforce instance URL for API calls", + examples=["https://mycompany.my.salesforce.com"], + ) + issued_at: datetime = Field( + default_factory=datetime.now, description="Timestamp when the token was issued" + ) + expires_in: int = Field( + default=7200, description="Token lifetime in seconds", ge=0, examples=[7200] + ) + token_type: str = Field( + default="Bearer", + description="OAuth token type, typically 'Bearer'", + pattern="^Bearer$", # Changed from regex to pattern + ) + scope: Optional[str] = Field( + default=None, description="OAuth scopes granted to the token" + ) + signature: Optional[str] = Field( + default=None, description="Request signature for 
verification" + ) + id_token: Optional[SecretStr] = Field( + default=None, description="OpenID Connect ID token if requested" + ) + + @computed_field + def expires_at(self) -> datetime: + """Calculate token expiration time""" + return self.issued_at.replace(microsecond=0) + timedelta( + seconds=self.expires_in + ) + + def model_dump_safe(self) -> dict: + """Dump model while masking sensitive fields""" + data = self.model_dump() + data["access_token"] = "**********" + self.access_token.get_secret_value()[-4:] + if self.id_token: + data["id_token"] = "*" * 10 + return data + + +class HttpResponse(BaseModel): + """HTTP response details""" + + status: int = Field(description="HTTP status code", ge=100, le=599) + reason: str = Field(description="HTTP status reason phrase") + headers: dict[str, str] = Field(description="HTTP response headers") + body: str = Field(description="Raw response body") + parsed_body: Optional[dict] = Field( + default=None, description="Parsed JSON response body if available" + ) + + +class TokenExchangeDebug(BaseModel): + """Debug information for token exchange""" + + url: str = Field( + description="Full URL for token exchange request", + examples=["https://login.salesforce.com/services/oauth2/token"], + ) + method: str = Field(description="HTTP method used", pattern="^POST$") + headers: dict[str, str] = Field(description="HTTP request headers") + request: TokenRequest = Field(description="Token request parameters") + response: Optional[HttpResponse] = Field( + default=None, description="Response information when available" + ) + error: Optional[str] = Field( + default=None, description="Error message if exchange failed" + ) + + def to_table(self) -> Table: + """Convert debug info to rich table""" + table = Table(title="Token Exchange Details", box=box.ROUNDED) + table.add_column("Property", style="cyan") + table.add_column("Value", style="yellow") + + table.add_row("URL", self.url) + table.add_row("Method", self.method) + for header, value in 
self.headers.items(): + table.add_row(f"Header: {header}", value) + table.add_row("Client ID", self.request.client_id) + table.add_row( + "Client Secret", + ( + "*" * len(self.request.client_secret.get_secret_value()) + if self.request.client_secret + else "Not provided" + ), + ) + table.add_row( + "Refresh Token", + "*" * 10 + self.request.refresh_token.get_secret_value()[-4:], + ) + + return table + + +class LoginUrlModel(CommonBaseModel): + """Model to generate login URL and token""" + + access_token: str + login_url: str + ret_url: str = "/" + + def get_login_url_and_token(self) -> tuple[str, str]: + """Generate login URL and token""" + ret_url_encoded = urllib.parse.quote(self.ret_url) if self.ret_url else "%2F" + login_url_formatted = f"{self.login_url}/secur/frontdoor.jsp?sid={self.access_token}&retURL={ret_url_encoded}" + return login_url_formatted, self.access_token + + +class SfdxAuthUrlModel(CommonBaseModel): + """Model to parse SFDX auth URL""" + + auth_url: str + + def parse_sfdx_auth_url(self) -> dict: + """Parse SFDX auth URL and extract detailed org information""" + sfdx_auth_url_pattern = re.compile( + r"^force://" + r"(?P[a-zA-Z0-9]{0,64})" + r":" + r"(?P[a-zA-Z0-9._~\-]*)" + r":" + r"(?P[a-zA-Z0-9._~\-]+)" + r"@" + r"(?P" + r"(?:https?://)?" + r"(?P[a-zA-Z0-9\-]+)?" + r"(?:--(?P[a-zA-Z0-9\-]+))?" + r"(?:(?Psandbox|scratch|developer|demo)?\.my\.salesforce\.com" + r"|\.lightning\.force\.com" + r"|\.my\.salesforce.com" + r"|(?Pcs|db)" + r"|(?P(?:na|eu|ap|au|uk|in|de|jp|sg|ca|br|fr|ae|il))" + r")" + r"(?P[0-9]+)?" + r"(?:\.salesforce\.com)?" 
+ r")$" + ) + + match = sfdx_auth_url_pattern.match(self.auth_url) + if not match: + raise ValueError("Invalid SFDX auth URL format") + + groups = match.groupdict() + + org_type = OrgType.PRODUCTION + if groups.get("org_suffix"): + org_type = OrgType(groups["org_suffix"]) + elif groups.get("sandbox_name"): + org_type = OrgType.SANDBOX + + domain_type = DomainType.POD + if ".my.salesforce.com" in groups["instance_url"]: + domain_type = DomainType.MY + elif ".lightning.force.com" in groups["instance_url"]: + domain_type = DomainType.LIGHTNING + + auth_info = AuthInfo( + client_id=groups["client_id"], + client_secret=groups["client_secret"] or "", + refresh_token=groups["refresh_token"], + instance_url=groups["instance_url"], + ) + + return { + "auth_info": auth_info, + "org_type": org_type, + "domain_type": domain_type, + "full_domain": groups["instance_url"], + "region": groups.get("region"), + "pod_number": groups.get("pod_number"), + "pod_type": groups.get("pod_type"), + "mydomain": groups.get("mydomain"), + "sandbox_name": groups.get("sandbox_name"), + } diff --git a/d2x/models/sf/org.py b/d2x/models/sf/org.py new file mode 100644 index 0000000..8553717 --- /dev/null +++ b/d2x/models/sf/org.py @@ -0,0 +1,60 @@ +from typing import Optional, Literal +from pydantic import Field +from d2x.base.models import CommonBaseModel +from d2x.models.sf.auth import AuthInfo, DomainType, OrgType + +RegionType = Literal[ + "na", + "eu", + "ap", + "au", + "uk", + "in", + "de", + "jp", + "sg", + "ca", + "br", + "fr", + "ae", + "il", + None, +] +PodType = Literal["cs", "db", None] + + +class SalesforceOrgInfo(CommonBaseModel): + """Structured information about a Salesforce org.""" + + auth_info: AuthInfo = Field( + ..., description="Authentication information for the Salesforce org." + ) + org_type: OrgType = Field(..., description="Type of the Salesforce org.") + domain_type: DomainType = Field( + ..., description="Type of domain for the Salesforce org." 
+ ) + full_domain: str = Field(..., description="Full domain of the Salesforce org.") + mydomain: Optional[str] = Field( + None, description="MyDomain name of the Salesforce org." + ) + sandbox_name: Optional[str] = Field(None, description="Sandbox name if applicable.") + region: Optional[RegionType] = Field( + None, description="Region of the Salesforce org." + ) + pod_number: Optional[str] = Field(None, description="Pod number if applicable.") + pod_type: Optional[PodType] = Field(None, description="Pod type if applicable.") + + @property + def is_classic_pod(self) -> bool: + """Determine if the pod is a classic pod.""" + return self.pod_type in ["cs", "db"] + + @property + def is_hyperforce(self) -> bool: + """Determine if the org is on Hyperforce.""" + return False # Placeholder implementation + + @property + def is_sandbox(self) -> bool: + """Determine if the org is a sandbox.""" + return self.org_type == OrgType.SANDBOX diff --git a/d2x/parse/__init__.py b/d2x/parse/__init__.py new file mode 100644 index 0000000..dd8bcd1 --- /dev/null +++ b/d2x/parse/__init__.py @@ -0,0 +1 @@ +"""d2x.parse""" diff --git a/d2x/parse/sf/auth_url/__init__.py b/d2x/parse/sf/auth_url/__init__.py deleted file mode 100644 index 3b4685e..0000000 --- a/d2x/parse/sf/auth_url/__init__.py +++ /dev/null @@ -1,211 +0,0 @@ -import re -from dataclasses import dataclass -from typing import Optional, Literal - -# Define explicit type literals for better type hints -OrgType = Literal["production", "sandbox", "scratch", "developer", "demo"] -DomainType = Literal["my", "lightning", "pod"] -PodType = Literal["cs", "db", None] -RegionType = Literal[ - "na", - "eu", - "ap", - "au", - "uk", - "in", - "de", - "jp", - "sg", - "ca", - "br", - "fr", - "ae", - "il", - None, -] - - -@dataclass -class SalesforceOrgInfo: - """Structured information about a Salesforce org parsed from SFDX auth URL""" - - # Auth components - client_id: str - client_secret: str - refresh_token: str - instance_url: str - - # Org 
identification - org_type: OrgType - domain_type: DomainType - - # Pod/Instance information - region: RegionType - pod_number: Optional[str] - pod_type: PodType - - # MyDomain information - mydomain: Optional[str] - sandbox_name: Optional[str] # The name after -- for sandbox/scratch orgs - - @property - def is_classic_pod(self) -> bool: - """Whether this is a classic pod (cs/db)""" - return bool(self.pod_type in ("cs", "db")) - - @property - def is_hyperforce(self) -> bool: - """Whether this org is on Hyperforce based on region""" - hyperforce_regions = { - "au", - "uk", - "in", - "de", - "jp", - "sg", - "ca", - "br", - "fr", - "ae", - "il", - } - return bool(self.region and self.region.lower() in hyperforce_regions) - - @property - def is_sandbox(self) -> bool: - """Whether this is a sandbox org""" - return self.org_type in ("sandbox", "scratch", "developer", "demo") - - @property - def full_domain(self) -> str: - """Reconstructed full domain without protocol""" - if self.domain_type == "pod": - base = f"{self.region or self.pod_type}{self.pod_number}" - return f"{base}.salesforce.com" - elif self.domain_type == "lightning": - return f"{self.mydomain}.lightning.force.com" - else: # my - base = f"{self.mydomain}" - if self.sandbox_name: - base = f"{base}--{self.sandbox_name}" - if self.org_type != "production": - return f"{base}.{self.org_type}.my.salesforce.com" - return f"{base}.my.salesforce.com" - - -# Updated regex pattern for better metadata extraction -sfdx_auth_url_pattern = re.compile( - r"^force://" # Protocol prefix - r"(?P[a-zA-Z0-9]{0,64})" # Client ID: alphanumeric, 0-64 chars - r":" # Separator - r"(?P[a-zA-Z0-9._~\-]*)" # Client secret: optional - r":" # Separator - r"(?P[a-zA-Z0-9._~\-]+)" # Refresh token: required - r"@" # Separator for instance URL - r"(?P" # Instance URL group - r"(?:https?://)?" 
# Protocol is optional - r"(?:" # Start non-capturing group for all possible domains - r"(?:" # Domain patterns group - # MyDomain with optional sandbox/scratch org - r"(?P[a-zA-Z0-9\-]+)" # Base domain - r"(?:--(?P[a-zA-Z0-9\-]+))?" # Optional sandbox name - r"(?:" # Start non-capturing group for domain types - r"\.(?Psandbox|scratch|developer|demo)?\.my\.salesforce\.com" # .my.salesforce.com domains - r"|" - r"\.lightning\.force\.com" # lightning.force.com domains - r"|" - r"\.my\.salesforce\.com" # Regular my.salesforce.com - r")" - r"|" # OR - r"(?Pcs|db)" # Classic pods (cs/db) - r"|" # OR - r"(?P(?:na|eu|ap|au|uk|in|de|jp|sg|ca|br|fr|ae|il))" # Region codes - r")" - r"(?P[0-9]+)?" # Optional pod number - r"(?:\.salesforce\.com)?" # Domain suffix for non-lightning domains - r")" - r")$" -) - - -def parse_sfdx_auth_url(auth_url: str) -> SalesforceOrgInfo: - """Parse an SFDX auth URL and extract detailed org information""" - match = sfdx_auth_url_pattern.match(auth_url) - if not match: - raise ValueError("Invalid SFDX auth URL format") - - groups = match.groupdict() - - # Determine org type - org_type: OrgType = "production" - if groups.get("org_suffix"): - org_type = groups["org_suffix"] # type: ignore - elif groups.get("sandbox_name"): - org_type = "sandbox" - - # Determine domain type - domain_type: DomainType = "pod" - if ".my.salesforce.com" in groups["instance_url"]: - domain_type = "my" - elif ".lightning.force.com" in groups["instance_url"]: - domain_type = "lightning" - - return SalesforceOrgInfo( - # Auth components - client_id=groups["client_id"], - client_secret=groups["client_secret"] or "", - refresh_token=groups["refresh_token"], - instance_url=groups["instance_url"], - # Org identification - org_type=org_type, - domain_type=domain_type, - # Pod/Instance information - region=groups.get("region"), # type: ignore - pod_number=groups.get("pod_number"), - pod_type=groups.get("pod_type"), # type: ignore - # MyDomain information - 
mydomain=groups.get("mydomain"), - sandbox_name=groups.get("sandbox_name"), - ) - - -def test_sfdx_auth_url_parser(): - test_urls = [ - "force://PlatformCLI::5Aep861T.BgtJABwpkWJm7RYLcqlS4pV50Iqxf8rqKD4F09oWzHo1vYJpfDnO0YpZ5lNfgw6wqUVShF2qVS2oSh@platypus-aries-9947-dev-ed.scratch.my.salesforce.com", - "force://PlatformCLI::token123@https://mycompany.my.salesforce.com", - "force://PlatformCLI::token123@https://mycompany.lightning.force.com", - "force://PlatformCLI::token123@https://mycompany--dev.sandbox.my.salesforce.com", - "force://PlatformCLI::token123@https://cs89.salesforce.com", - "force://PlatformCLI::token123@https://na139.salesforce.com", - "force://PlatformCLI::token123@https://au5.salesforce.com", - ] - - print("\nTesting SFDX Auth URL Parser:") - print("-" * 50) - - for url in test_urls: - try: - info = parse_sfdx_auth_url(url) - print(f"\nParsed URL: {url[:50]}...") - print(f"Full Domain: {info.full_domain}") - print(f"Org Type: {info.org_type}") - print(f"Domain Type: {info.domain_type}") - if info.domain_type == "pod": - print(f"Pod Details:") - print(f" Region: {info.region or 'Classic'}") - print(f" Number: {info.pod_number}") - print(f" Type: {info.pod_type or 'Standard'}") - print(f" Classic: {info.is_classic_pod}") - print(f" Hyperforce: {info.is_hyperforce}") - else: - print(f"MyDomain: {info.mydomain}") - if info.sandbox_name: - print(f"Sandbox Name: {info.sandbox_name}") - print(f"Is Sandbox: {info.is_sandbox}") - print("-" * 30) - except ValueError as e: - print(f"Error parsing URL: {e}") - - -if __name__ == "__main__": - test_sfdx_auth_url_parser() diff --git a/d2x/ux/__init__.py b/d2x/ux/__init__.py new file mode 100644 index 0000000..867ff69 --- /dev/null +++ b/d2x/ux/__init__.py @@ -0,0 +1 @@ +# This is an empty __init__.py file for the d2x.ux module diff --git a/d2x/ux/cci/__init__.py b/d2x/ux/cci.py similarity index 100% rename from d2x/ux/cci/__init__.py rename to d2x/ux/cci.py diff --git a/d2x/ux/gh/__init__.py 
b/d2x/ux/gh/__init__.py new file mode 100644 index 0000000..3b82c32 --- /dev/null +++ b/d2x/ux/gh/__init__.py @@ -0,0 +1 @@ +# This is an empty __init__.py file for the d2x.ux.gh module diff --git a/d2x/ux/gh/actions/__init__.py b/d2x/ux/gh/actions.py similarity index 100% rename from d2x/ux/gh/actions/__init__.py rename to d2x/ux/gh/actions.py diff --git a/devhub.sh b/devhub.sh old mode 100755 new mode 100644 index 1135b83..90698e0 --- a/devhub.sh +++ b/devhub.sh @@ -4,7 +4,6 @@ if [ -f ~/.dev_hub_authenticated ]; then exit 0 fi - if [ -z "$DEV_HUB_AUTH_URL" ]; then if [ -z "$DEV_HUB_USERNAME" ]; then echo "DEV_HUB_USERNAME is not set, length is $(echo $(($(echo $DEV_HUB_USERNAME|wc -c)-1))). You must set either DEV_HUB_AUTH_URL or DEV_HUB_USERNAME, DEV_HUB_CLIENT_ID, and DEV_HUB_PRIVATE_KEY." diff --git a/poetry.lock b/poetry.lock index 698d0ed..05832d9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -44,6 +44,31 @@ files = [ [package.dependencies] chardet = ">=3.0.2" +[[package]] +name = "build" +version = "1.2.2.post1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + [[package]] name = "certifi" version = "2024.8.30" @@ -55,6 +80,85 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file 
= "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" @@ -226,6 +330,132 @@ pyyaml = ">=5.3.1" requests = ">=2.23.0" rich = "*" +[[package]] +name = "coverage" +version = "7.6.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = 
"coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + 
{file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = 
"coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "43.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = 
"cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = 
"cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -254,6 +484,29 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = 
"iniconfig" version = "2.0.0" @@ -398,6 +651,41 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "pip" +version = "24.3.1" +description = "The PyPA recommended tool for installing Python packages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pip-24.3.1-py3-none-any.whl", hash = "sha256:3790624780082365f47549d032f3770eeb2b1e8bd1f7b2e02dace1afa361b4ed"}, + {file = "pip-24.3.1.tar.gz", hash = "sha256:ebcb60557f2aefabc2e0f918751cd24ea0d56d8ec5445fe1807f1d2109660b99"}, +] + +[[package]] +name = "pip-tools" +version = "7.4.1" +description = "pip-tools keeps your pinned dependencies fresh." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pip-tools-7.4.1.tar.gz", hash = "sha256:864826f5073864450e24dbeeb85ce3920cdfb09848a3d69ebf537b521f14bcc9"}, + {file = "pip_tools-7.4.1-py3-none-any.whl", hash = "sha256:4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9"}, +] + +[package.dependencies] +build = ">=1.0.0" +click = ">=8" +pip = ">=22.2" +pyproject_hooks = "*" +setuptools = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +wheel = "*" + +[package.extras] +coverage = ["covdefaults", "pytest-cov"] +testing = ["flit_core (>=2,<4)", "poetry_core (>=1.0.0)", "pytest (>=7.2.0)", "pytest-rerunfailures", "pytest-xdist", "tomli-w"] + [[package]] name = "pluggy" version = "1.5.0" @@ -413,6 +701,17 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.9.2" 
@@ -551,6 +850,43 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] 
+name = "pyproject-hooks" +version = "1.2.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] + [[package]] name = "pytest" version = "8.3.3" @@ -573,6 +909,24 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -706,6 +1060,46 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rich-click" +version = "1.8.3" +description = "Format click help output nicely with rich" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rich_click-1.8.3-py3-none-any.whl", hash = "sha256:636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd"}, + {file = "rich_click-1.8.3.tar.gz", hash = "sha256:6d75bdfa7aa9ed2c467789a0688bc6da23fbe3a143e19aa6ad3f8bac113d2ab3"}, +] + 
+[package.dependencies] +click = ">=7" +rich = ">=10.7" +typing-extensions = "*" + +[package.extras] +dev = ["mypy", "packaging", "pre-commit", "pytest", "pytest-cov", "rich-codex", "ruff", "types-setuptools"] +docs = ["markdown-include", "mkdocs", "mkdocs-glightbox", "mkdocs-material-extensions", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-rss-plugin", "mkdocstrings[python]", "rich-codex"] + +[[package]] +name = "setuptools" +version = "75.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, + {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist 
(>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] + [[package]] name = "six" version = "1.16.0" @@ -778,7 +1172,40 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "wheel" +version = "0.44.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1e9bb07a5756a1a1086a5bd8a2466785039eedd72412cc30e638f3935c432a3c" +content-hash = "481f107ad34cd16189493bc7567a6963744f8e3d6f33c3f35770aab15529adcd" diff --git a/pyproject.toml b/pyproject.toml index ad95d2e..dad2b42 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,8 @@ +# pyproject.toml [tool.poetry] name = "d2x" -version = "0.1.2" -description = "Salesforce DevOps Helper" +version = "0.1.3" +description = "Composable Salesforce DevOps on GitHub" authors = ["Muselab LLC"] license = "BSD3" readme = "README.md" @@ -11,10 +12,32 @@ python = "^3.10" rich = "^13.9.3" pydantic = "^2.9.2" cookiecutter = "^2.6.0" +requests = "^2.28.1" +click = "^8.1.3" +rich_click = "^1.0.0" +pynacl = "^1.4.0" [tool.poetry.group.dev.dependencies] pytest = "^8.3.3" +pip-tools = "^7.4.1" +pytest-cov = "^4.0.0" + +[tool.poetry.scripts] +d2x = "d2x.cli.main:d2x_cli" + +[project] +name = "d2x" + +[project.scripts] +d2x = "d2x.cli:d2x_cli" [build-system] requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file +build-backend = "poetry.core.masonry.api" + +[tool.pip-compile] +generate-hashes = true +output-file = "requirements.txt" + +[tool.pytest.ini_options] +addopts = "--cov=d2x --cov-report=term-missing" diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..14d1ab8 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,64 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml +# +annotated-types==0.7.0 + # via pydantic +arrow==1.3.0 + # via cookiecutter +binaryornot==0.4.4 + # via cookiecutter +certifi==2024.8.30 + # via requests +chardet==5.2.0 + # via binaryornot +charset-normalizer==3.4.0 + # via requests +click==8.1.7 + # via cookiecutter +cookiecutter==2.6.0 + # via d2x (pyproject.toml) +idna==3.10 + # via requests +jinja2==3.1.4 + # via cookiecutter +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +pydantic==2.9.2 + # via d2x (pyproject.toml) +pydantic-core==2.23.4 + # via pydantic +pygments==2.18.0 + # via rich +python-dateutil==2.9.0.post0 + # 
via arrow +python-slugify==8.0.4 + # via cookiecutter +pyyaml==6.0.2 + # via cookiecutter +requests==2.32.3 + # via + # cookiecutter + # d2x (pyproject.toml) +rich==13.9.3 + # via + # cookiecutter + # d2x (pyproject.toml) +six==1.16.0 + # via python-dateutil +text-unidecode==1.3 + # via python-slugify +types-python-dateutil==2.9.0.20241003 + # via arrow +typing-extensions==4.12.2 + # via + # pydantic + # pydantic-core +urllib3==2.2.3 + # via requests diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..694d407 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,64 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile pyproject.toml +# +annotated-types==0.7.0 + # via pydantic +arrow==1.3.0 + # via cookiecutter +binaryornot==0.4.4 + # via cookiecutter +certifi==2024.8.30 + # via requests +chardet==5.2.0 + # via binaryornot +charset-normalizer==3.4.0 + # via requests +click==8.1.7 + # via cookiecutter +cookiecutter==2.6.0 + # via d2x (pyproject.toml) +idna==3.10 + # via requests +jinja2==3.1.4 + # via cookiecutter +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +pydantic==2.9.2 + # via d2x (pyproject.toml) +pydantic-core==2.23.4 + # via pydantic +pygments==2.18.0 + # via rich +python-dateutil==2.9.0.post0 + # via arrow +python-slugify==8.0.4 + # via cookiecutter +pyyaml==6.0.2 + # via cookiecutter +requests==2.32.3 + # via + # cookiecutter + # d2x (pyproject.toml) +rich==13.9.3 + # via + # cookiecutter + # d2x (pyproject.toml) +six==1.16.0 + # via python-dateutil +text-unidecode==1.3 + # via python-slugify +types-python-dateutil==2.9.0.20241003 + # via arrow +typing-extensions==4.12.2 + # via + # pydantic + # pydantic-core +urllib3==2.2.3 + # via requests diff --git a/requirements/dev.in b/requirements/dev.in new file mode 100644 index 0000000..e69de29 diff --git a/requirements/production.in b/requirements/production.in 
new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_auth.py b/tests/test_auth.py new file mode 100644 index 0000000..536c2d8 --- /dev/null +++ b/tests/test_auth.py @@ -0,0 +1,54 @@ +import re +import pytest +from pydantic import ValidationError +from d2x.models.sf.auth import LoginUrlModel, SfdxAuthUrlModel + + +def test_login_url_model(): + model = LoginUrlModel(access_token="test_token", login_url="https://example.com") + login_url, token = model.get_login_url_and_token() + assert ( + login_url == "https://example.com/secur/frontdoor.jsp?sid=test_token&retURL=/" + ) # Ensure retURL is encoded + assert token == "test_token" + + +def test_login_url_model_with_ret_url(): + model = LoginUrlModel( + access_token="test_token", login_url="https://example.com", ret_url="/home" + ) + login_url, token = model.get_login_url_and_token() + assert ( + login_url + == "https://example.com/secur/frontdoor.jsp?sid=test_token&retURL=/home" + ) + assert token == "test_token" + + +def test_login_url_model_missing_access_token(): + with pytest.raises(ValidationError): + LoginUrlModel(login_url="https://example.com") + + +def test_login_url_model_missing_login_url(): + with pytest.raises(ValidationError): + LoginUrlModel(access_token="test_token") + + +def test_sfdx_auth_url_model(): + auth_url = "force://PlatformCLI::token123@https://mycompany.my.salesforce.com" + model = SfdxAuthUrlModel(auth_url=auth_url) + org_info = model.parse_sfdx_auth_url() + assert org_info["auth_info"].client_id == "PlatformCLI" + assert org_info["auth_info"].refresh_token == "token123" + assert org_info["auth_info"].instance_url == "https://mycompany.my.salesforce.com" + assert org_info["org_type"] == "production" + assert org_info["domain_type"] == "my" + assert org_info["full_domain"] == "https://mycompany.my.salesforce.com" + + +def test_sfdx_auth_url_model_invalid_url(): + auth_url = "invalid_url" + model = SfdxAuthUrlModel(auth_url=auth_url) + with pytest.raises(ValueError): + 
model.parse_sfdx_auth_url() diff --git a/tests/test_auth_url.py b/tests/test_auth_url.py new file mode 100644 index 0000000..a925713 --- /dev/null +++ b/tests/test_auth_url.py @@ -0,0 +1,97 @@ +import json +import unittest +from unittest.mock import patch, MagicMock +from pydantic import SecretStr +from d2x.auth.sf.auth_url import exchange_token +from d2x.models.sf.org import SalesforceOrgInfo +from d2x.base.types import CLIOptions +from d2x.models.sf.auth import AuthInfo + + +class TestExchangeToken(unittest.TestCase): + @patch("d2x.auth.sf.auth_url.set_environment_variable") + @patch("d2x.auth.sf.auth_url.http.client.HTTPSConnection") + def test_exchange_token_success(self, mock_https_connection, mock_set_env_var): + # Mock the SalesforceOrgInfo + org_info = SalesforceOrgInfo( + auth_info=AuthInfo( + client_id="test_client_id", + client_secret=SecretStr("test_client_secret"), # Wrapped with SecretStr + refresh_token="test_refresh_token", + instance_url="https://test.salesforce.com", + ), + org_type="production", + domain_type="pod", + full_domain="test.salesforce.com", + ) + + # Mock the CLIOptions + cli_options = CLIOptions(output_format="text", debug=False) + + # Mock the HTTPSConnection and response + mock_conn = MagicMock() + mock_https_connection.return_value = mock_conn + mock_response = MagicMock() + mock_response.status = 200 + mock_response.reason = "OK" + mock_response.read.return_value = json.dumps( + { + "access_token": "test_access_token", + "instance_url": "https://test.salesforce.com", + "id": "https://test.salesforce.com/id/00Dxx0000001gEREAY/005xx000001Sv6eAAC", + "token_type": "Bearer", + "issued_at": "1627382400000", + "signature": "test_signature", + } + ).encode("utf-8") + mock_conn.getresponse.return_value = mock_response + + # Call the function + token_response = exchange_token(org_info, cli_options) + + # Assertions + self.assertEqual( + token_response.access_token.get_secret_value(), "test_access_token" + ) + 
self.assertEqual(token_response.instance_url, "https://test.salesforce.com") + mock_set_env_var.assert_called_once_with( + "salesforce", "ACCESS_TOKEN", "test_access_token" + ) + + @patch("d2x.auth.sf.auth_url.set_environment_variable") + @patch("d2x.auth.sf.auth_url.http.client.HTTPSConnection") + def test_exchange_token_failure(self, mock_https_connection, mock_set_env_var): + # Mock the SalesforceOrgInfo + org_info = SalesforceOrgInfo( + auth_info=AuthInfo( + client_id="test_client_id", + client_secret=SecretStr("test_client_secret"), # Wrapped with SecretStr + refresh_token="test_refresh_token", + instance_url="https://test.salesforce.com", + ), + org_type="production", + domain_type="pod", + full_domain="test.salesforce.com", + ) + + # Mock the CLIOptions + cli_options = CLIOptions(output_format="text", debug=False) + + # Mock the HTTPSConnection and response + mock_conn = MagicMock() + mock_https_connection.return_value = mock_conn + mock_response = MagicMock() + mock_response.status = 400 + mock_response.reason = "Bad Request" + mock_response.read.return_value = json.dumps( + {"error": "invalid_grant", "error_description": "authentication failure"} + ).encode("utf-8") + mock_conn.getresponse.return_value = mock_response + + # Call the function and assert exception + with self.assertRaises(RuntimeError): + exchange_token(org_info, cli_options) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_login_url.py b/tests/test_login_url.py new file mode 100644 index 0000000..51de459 --- /dev/null +++ b/tests/test_login_url.py @@ -0,0 +1,69 @@ +import json +import unittest +from unittest.mock import patch +from d2x.auth.sf.login_url import generate_login_url, main as login_url_main +from d2x.models.sf.org import SalesforceOrgInfo +from d2x.base.types import CLIOptions +from d2x.models.sf.auth import AuthInfo + + +class TestGenerateLoginUrl(unittest.TestCase): + @patch("d2x.api.gh.get_environment_variable") # Updated patch target + def 
test_generate_login_url_success(self, mock_get_env_var): + # Mock the SalesforceOrgInfo + org_info = SalesforceOrgInfo( + auth_info=AuthInfo( + client_id="test_client_id", + client_secret="test_client_secret", + refresh_token="test_refresh_token", + instance_url="https://test.salesforce.com", + ), + org_type="production", + domain_type="pod", + full_domain="test.salesforce.com", + ) + + # Mock the CLIOptions + cli_options = CLIOptions(output_format="text", debug=False) + + # Mock the get_environment_variable function + mock_get_env_var.return_value = "test_access_token" + + # Call the function + login_url = generate_login_url( + instance_url=org_info.auth_info.instance_url, + access_token="test_access_token", + ) + + # Assertions + self.assertIn("https://test.salesforce.com", login_url) + self.assertIn("test_access_token", login_url) + + @patch("d2x.api.gh.get_environment_variable") # Updated patch target + def test_generate_login_url_failure(self, mock_get_env_var): + # Mock the SalesforceOrgInfo + org_info = SalesforceOrgInfo( + auth_info=AuthInfo( + client_id="test_client_id", + client_secret="test_client_secret", + refresh_token="test_refresh_token", + instance_url="https://test.salesforce.com", + ), + org_type="production", + domain_type="pod", + full_domain="test.salesforce.com", + ) + + # Mock the CLIOptions + cli_options = CLIOptions(output_format="text", debug=False) + + # Mock the get_environment_variable function to raise an exception + mock_get_env_var.side_effect = Exception("Error retrieving access token") + + # Call the function and assert exception + with self.assertRaises(Exception): + login_url_main(cli_options) + + +if __name__ == "__main__": + unittest.main()