diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml new file mode 100644 index 0000000..36e9d5d --- /dev/null +++ b/.github/workflows/push.yml @@ -0,0 +1,60 @@ +name: Build Artifacts +on: + release: + types: [created] + push: + branches: + - '**' +env: + TAG_NAME: ${{ github.event.release.tag_name || github.ref }} + +jobs: + multiplatform_build: + strategy: + fail-fast: false + matrix: + component: + - name: qubership-apihub-backend + file: Dockerfile + context: "" + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{secrets.GITHUB_TOKEN}} + - name: Prepare Tag + run: echo "TAG_NAME=$(echo ${TAG_NAME} | sed 's@refs/tags/@@;s@refs/heads/@@;s@/@_@g')" >> $GITHUB_ENV + - name: Get package IDs for delete + id: get-ids-for-delete + uses: Netcracker/get-package-ids@v0.0.1 + with: + component-name: ${{ matrix.component.name }} + component-tag: ${{ env.TAG_NAME }} + access-token: ${{ secrets.GH_ACCESS_TOKEN }} + - name: Build and push + uses: docker/build-push-action@v5 + with: + no-cache: true + context: ${{ matrix.component.context }} + file: ${{ matrix.component.file }} + platforms: linux/amd64,linux/arm64 + push: true + tags: ghcr.io/netcracker/${{ matrix.component.name }}:${{ env.TAG_NAME }} + provenance: false + build-args: | + GH_ACCESS_TOKEN=${{ secrets.GH_ACCESS_TOKEN }} + - uses: actions/delete-package-versions@v5 + with: + package-name: ${{ matrix.component.name }} + package-type: 'container' + package-version-ids: ${{ steps.get-ids-for-delete.outputs.ids-for-delete }} + if: ${{ steps.get-ids-for-delete.outputs.ids-for-delete != '' }} \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..49de3f1 --- /dev/null +++ 
b/.gitignore @@ -0,0 +1,28 @@ +apihub-service/.idea +apihub-service/go.sum +.idea +.vscode +.history +apihub-service/data/** +apihub-service/static/** +apihub-service/logs/** +apihub-service/apihub-service.exe +apihub-service/__debug_bin.exe +apihub-service/apihub-service +logs +apihub-service/logs +apihub-service/tmp/** +apihub-service/bin/** + +docs/local_development/jwt_private_key +*.patch +*.iml +*.bak +*.orig +*.exe +*.env +env +jwt_private_key + +*.bak +*.patch \ No newline at end of file diff --git a/CODE-OF-CONDUCT.md b/CODE-OF-CONDUCT.md new file mode 100644 index 0000000..f5b511b --- /dev/null +++ b/CODE-OF-CONDUCT.md @@ -0,0 +1,73 @@ +# Code of Conduct + +This repository is governed by following code of conduct guidelines. + +We put collaboration, trust, respect and transparency as core values for our community. +Our community welcomes participants from all over the world with different experience, +opinion and ideas to share. + +We have adopted this code of conduct and require all contributors to agree with that to build a healthy, +safe and productive community for all. 
+ +The guideline is aimed to support a community where all people should feel safe to participate, +introduce new ideas and inspire others, regardless of: + +* Age +* Gender +* Gender identity or expression +* Family status +* Marital status +* Ability +* Ethnicity +* Race +* Sex characteristics +* Sexual identity and orientation +* Education +* Native language +* Background +* Caste +* Religion +* Geographic location +* Socioeconomic status +* Personal appearance +* Any other dimension of diversity + +## Our Standards + +We are welcoming the following behavior: + +* Be respectful for different ideas, opinions and points of view +* Be constructive and professional +* Use inclusive language +* Be collaborative and show the empathy +* Focus on the best results for the community + +The following behavior is unacceptable: + +* Violence, threats of violence, or inciting others to commit self-harm +* Personal attacks, trolling, intentionally spreading misinformation, insulting/derogatory comments +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Derogatory language +* Encouraging unacceptable behavior +* Other conduct which could reasonably be considered inappropriate in a professional community + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of the Code of Conduct +and are expected to take appropriate actions in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, +commits, code, wiki edits, issues, and other contributions that are not aligned +to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors +that they deem inappropriate, threatening, offensive, or harmful. 
+ +## Reporting + +If you believe you’re experiencing unacceptable behavior that will not be tolerated as outlined above, +please report to `opensourcegroup@netcracker.com`. All complaints will be reviewed and investigated and will result in a response +that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality +with regard to the reporter of an incident. + +Please also report if you observe a potentially dangerous situation, someone in distress, or violations of these guidelines, +even if the situation is not happening to you. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..292ce26 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,12 @@ +# Contribution Guide + +We'd love to accept patches and contributions to this project. +Please, follow these guidelines to make the contribution process easy and effective for everyone involved. + +## Contributor License Agreement + +You must sign the [Contributor License Agreement](https://pages.netcracker.com/cla-main.html) in order to contribute. + +## Code of Conduct + +Please make sure to read and follow the [Code of Conduct](CODE-OF-CONDUCT.md). 
diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..730a2be --- /dev/null +++ b/Dockerfile @@ -0,0 +1,20 @@ +FROM docker.io/golang:1.23.4-alpine3.21 + +MAINTAINER qubership.org + +WORKDIR /app/qubership-apihub-service + +USER root + +RUN apk --no-cache add curl + +ADD qubership-apihub-service/qubership-apihub-service ./qubership-apihub-service +ADD qubership-apihub-service/static ./static +ADD qubership-apihub-service/resources ./resources +ADD docs/api ./api + +RUN chmod -R a+rwx /app + +USER 10001 + +ENTRYPOINT ./qubership-apihub-service diff --git a/LICENSE b/LICENSE index 261eeb9..d645695 100644 --- a/LICENSE +++ b/LICENSE @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/README.md b/README.md index d253735..3ca5d1c 100644 --- a/README.md +++ b/README.md @@ -1 +1,27 @@ -# qubership-apihub-backend \ No newline at end of file +# qubership-apihub-backend + +qubership-apihub-backend is a part of global qubership-apihub application, which is described here: [qubership-apihub](https://github.com/Netcracker/qubership-apihub). + +qubership-apihub-backend (aka API Registry) is the main backend microservice of qubership-apihub solution. It accumulates all business domain and core logic, provides REST API for qubership-apihub-ui and integrations. + +## Installation + +Partial installation of qubership-apihub-backend makes no sense as it is a part of bigger application qubership-apihub. +qubership-apihub installation is possible via docker-compose and helm. Please refer to installation guides: [qubership-apihub](https://github.com/Netcracker/qubership-apihub). 
+ +If you want to run qubership-apihub-backend only for development or debugging purposes please refer to Debug section below + + +## Build + +Just run build.cmd(sh) file from this repository + + +## Debug + +[Local development principles](./docs/local_development/local_development.md) + + +## Developer Tools + +[Development tools setup](./docs/newcomer_env_setup.md) diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..8162261 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,15 @@ +# Security Reporting Process + +Please, report any security issue to `opensourcegroup@netcracker.com` where the issue will be triaged appropriately. + +If you know of a publicly disclosed security vulnerability please IMMEDIATELY email `opensourcegroup@netcracker.com` +to inform the team about the vulnerability, so we may start the patch, release, and communication process. + +# Security Release Process + +If the vulnerability is found in the latest stable release, then it would be fixed in patch version for that release. +E.g., issue is found in 2.5.0 release, then 2.5.1 version with a fix will be released. +By default, older versions will not have security releases. + +If the issue doesn't affect any existing public releases, the fix for medium and high issues is performed +in a main branch before releasing a new version. For low priority issues the fix can be planned for future releases. diff --git a/build.cmd b/build.cmd new file mode 100644 index 0000000..05fba7d --- /dev/null +++ b/build.cmd @@ -0,0 +1,9 @@ +set GOSUMDB=off +set CGO_ENABLED=0 +set GOOS=linux +cd ./qubership-apihub-service +go mod tidy +go mod download +go build . +cd .. 
+podman build ./ -t netcracker/qubership-apihub-backend \ No newline at end of file diff --git a/docs/api/APIHUB API.yaml b/docs/api/APIHUB API.yaml new file mode 100644 index 0000000..ed9f55c --- /dev/null +++ b/docs/api/APIHUB API.yaml @@ -0,0 +1,3027 @@ +openapi: 3.1.0 +info: + title: APIHUB BE API contract + description: > + The API contract for APIHUB UI and APIDOC UI. + + You can find more information about the project at [DOC + page](url: https://qubership.org/APIHUB). + license: + name: NA + url: https://qubership.org + version: 0.7.18 + x-nc-api-audience: noBWC +externalDocs: + description: Find out more about project + url: https://qubership.org/APIHUB +servers: + - url: https://apihub.qubership.org/api/v1 + description: Direct API call host + - url: http://localhost:3000 + description: Local server +tags: + - name: Export + description: Export API documentation. + - name: "[Draft]" + description: API HUB draft endpoints. + - name: Auth + description: APIs for auth integrations + - name: Branch + description: Branch management + - name: BranchFiles + description: Branch files management + - name: CLI + description: CLI API + - name: Compare + description: Published project comparison API + - name: Custom + description: APIs for custom integrations + - name: Groups + description: APIs for the group management + - name: Packages + description: APIs for the package management + - name: History + description: Change history methods + - name: Integrations + description: APIs for git integrations + - name: Projects + description: APIs for the project management + - name: Users + description: APIs for the user operations + - name: Versions + description: Published project versions API + - name: Agent + description: APIHUB Agent API documentation. 
+ - name: Publish + description: APIs for publication +paths: + /integrations/gitlab/apikey: + get: + tags: + - Integrations + summary: Get Gitlab-apikey integration status + description: Get integration status + operationId: getIntegrationsGitlabApiKey + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/GitlabApiKeyIntegrationStatus" + "500": + $ref: "#/components/responses/internalServerError500" + put: + tags: + - Integrations + summary: Update Gitlab-apikey integration + description: Update integration data + operationId: putIntegrationsGitlabApikey + requestBody: + description: User ApiKey + content: + application/json: + schema: + $ref: "#/components/schemas/GitlabApiKeyIntegrationData" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/GitlabApiKeyIntegrationStatus" + "500": + $ref: "#/components/responses/internalServerError500" + /integrations/{integrationType}/repositories: + get: + tags: + - Integrations + summary: Get list of the GIT repositories + description: Get list of the GIT repositories + operationId: getIntegrationsTypeRepositories + parameters: + - name: filter + schema: + type: string + description: Filter string or GIT clone URL + in: query + required: false + - $ref: "#/components/parameters/integrationType" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: Repositories list (max 15 items) + type: object + properties: + repositories: + type: array + items: + $ref: "#/components/schemas/Repository" + "400": + $ref: "#/components/responses/badRequest400" + "500": + $ref: "#/components/responses/internalServerError500" + /integrations/{integrationType}/repositories/{repositoryId}/branches: + get: + tags: + - Integrations + summary: 
Get list of branches + description: >- + Get list of branches (same logic as GET + `/projects/{projectId}/branches') + operationId: getIntegrationsTypeRepositoriesIdBranches + parameters: + - name: repositoryId + schema: + type: string + description: Repository Id + in: path + required: true + - $ref: "#/components/parameters/integrationType" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/filter" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: Branch list (all) + type: object + properties: + branches: + type: array + items: + $ref: "#/components/schemas/GitBranch" + "400": + $ref: "#/components/responses/badRequest400" + "500": + $ref: "#/components/responses/internalServerError500" + /groups: + post: + tags: + - Groups + summary: Create a new group + description: Create a new project group + operationId: postGroups + requestBody: + description: Group for creation + content: + application/json: + schema: + $ref: "#/components/schemas/GroupCreate" + required: true + responses: + "201": + description: Successful creation + content: + application/json: + schema: + $ref: "#/components/schemas/Group" + "400": + $ref: "#/components/responses/badRequest400" + "500": + $ref: "#/components/responses/internalServerError500" + get: + tags: + - Groups + summary: Get groups list + description: Retrieve the child group list or root groups list if alias is empty + operationId: getGroups + parameters: + - name: groupId + description: Result root GroupId (Full alias) + schema: + type: string + example: QS.CQSS + in: query + allowEmptyValue: true + - name: depth + schema: + type: number + enum: + - 0 + - 1 + default: 1 + in: query + description: Depth of subgroups. Full depth if 0. 
+ allowEmptyValue: true + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: Groups list + type: object + properties: + groups: + type: array + items: + $ref: "#/components/schemas/Group" + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /groups/{groupId}: + parameters: + - $ref: "#/components/parameters/groupId" + get: + tags: + - Groups + summary: Get group info + operationId: getGroupsId + description: Get group info with parent groups list + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/GroupInfo" + "500": + $ref: "#/components/responses/internalServerError500" + /groups/{groupId}/favor: + parameters: + - $ref: "#/components/parameters/groupId" + post: + tags: + - Groups + - Users + summary: Add group to favorites for user + description: >- + Add group to favorite list for the user. The user is taken from the + token info + operationId: postGroupsIdFavor + responses: + "200": + description: Successful operation + content: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /groups/{groupId}/disfavor: + parameters: + - $ref: "#/components/parameters/groupId" + post: + tags: + - Groups + - Users + summary: Remove group from favorites for user + description: >- + Remove group from favorite list for the user. 
The user is taken from the + token info + operationId: postGroupsIdDisfavor + responses: + "200": + description: Successful operation + content: {} + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /projects: + post: + tags: + - Projects + summary: Create a new project + description: Create a new project in the API HUB + operationId: postProjects + requestBody: + description: Project for creation + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectCreate" + examples: + ProjectCreateInfo: + $ref: "#/components/examples/ProjectCreate" + required: true + responses: + "201": + description: Successful creation + content: + application/json: + schema: + $ref: "#/components/schemas/Project" + examples: + ProjectBaseInfo: + $ref: "#/components/examples/Project" + "400": + $ref: "#/components/responses/badRequest400" + "500": + $ref: "#/components/responses/internalServerError500" + get: + tags: + - Projects + summary: Get projects list + description: Retrieve the projects list + operationId: getProjects + parameters: + - name: groupId + in: query + description: filter by Group id (full alias). + schema: + type: string + - name: textFilter + in: query + description: filter by name/alias. 
+ schema: + type: string + - name: onlyFavorite + in: query + description: filter only favorite projects + schema: + type: boolean + default: false + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - name: onlyPublished + in: query + description: Filter only projects with published versions + schema: + type: boolean + default: false + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: Projects list with paging information + type: object + properties: + projects: + type: array + items: + $ref: "#/components/schemas/Project" + "400": + $ref: "#/components/responses/badRequest400" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}: + parameters: + - $ref: "#/components/parameters/projectId" + get: + tags: + - Projects + summary: Get project by Id + description: >- + Detail information about the project and references with the GIT + repository + operationId: getProjectsId + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/Project" + examples: + Project: + $ref: "#/components/examples/Project" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + put: + tags: + - "[Draft]" + - Projects + summary: "[Draft] Update project" + description: Change the project's parameters + operationId: putProjectsId + requestBody: + description: Project update parameters + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectUpdate" + responses: + "200": + description: Successfully changed + content: + application/json: + schema: + $ref: "#/components/schemas/Project" + examples: + ProjectBaseInfo: + $ref: "#/components/examples/Project" + "400": + 
$ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + delete: + tags: + - Projects + summary: Delete project + description: >- + Delete the project and all links to the GIT. The GIT objects are not + deleted + operationId: deleteProjectsId + responses: + "204": + description: Successfully deleted + content: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/favor: + parameters: + - $ref: "#/components/parameters/projectId" + post: + tags: + - Projects + - Users + summary: Add project to favorites for user + description: >- + Add project to favorite list for the user. The user is taken from the + token info + operationId: postProjectsIdFavor + responses: + "200": + description: Successful operation + content: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/disfavor: + parameters: + - $ref: "#/components/parameters/projectId" + post: + tags: + - Projects + - Users + summary: Remove project from favorites for user + description: >- + Remove project from favorite list for the user. 
The user is taken from + the token info + operationId: postProjectsIdDisfavor + responses: + "200": + description: Successful operation + content: {} + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches: + parameters: + - $ref: "#/components/parameters/projectId" + get: + tags: + - Projects + summary: Get project branches list + description: Retrieve the project branches list (20 items best match filter) + operationId: getProjectsIdBranches + parameters: + - name: filter + in: query + description: Branch filter + schema: + type: string + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: Branches list + type: object + properties: + branches: + type: array + items: + $ref: "#/components/schemas/ProjectBranch" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/config: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - name: original + in: query + schema: + type: boolean + default: false + description: Original branch config from GIT in Response + get: + tags: + - Branch + summary: Get branch GIT config + description: Get the project branch GIT config + operationId: getProjectsIdBranchesIdConfig + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/BranchConfig" + "400": + $ref: 
"#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + get: + tags: + - Branch + summary: Get project branch content + description: Get the project branch details by ID + operationId: getProjectsIdBranchesId + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectContent" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/reset: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + post: + summary: Reset branch files to base commit + tags: + - Branch + operationId: postProjectsIdBranchReset + description: Delete draft changes and get config file from GIT + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectContent" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + 
"500": + $ref: "#/components/responses/internalServerError500" + + /projects/{projectId}/branches/{branch}/conflicts: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + get: + summary: Get list of conflicts in branch + tags: + - Branch + operationId: getProjectsIdBranchConflicts + description: Get list of changed files in branch + responses: + "200": + description: Successful operation + content: + application/json: + schema: + type: object + properties: + files: + description: List of FileIds with conflicts + type: array + items: + type: string + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/clone: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + post: + summary: Create new branch in GIT + tags: + - Branch + operationId: postProjectsIdBranchClone + description: Create new branch in GIT + requestBody: + description: Project update parameters + content: + application/json: + schema: + type: object + properties: + branch: + type: string + description: New branch name + required: + - branch + responses: + "201": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/history: + parameters: 
+ - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + get: + tags: + - History + - Branch + summary: Get project branch commit history + description: Retrieve the project branch commit history + deprecated: true + operationId: getProjectsIdBranchesIdHistory + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: History items list + type: object + properties: + changes: + type: array + items: + $ref: "#/components/schemas/ChangeHistory" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/upload: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + post: + tags: + - Branch + summary: Upload files to project branch + description: Upload files to project branch + operationId: postProjectsIdBranchesIdUpload + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + path: + type: string + responses: + "200": + description: Successful execution + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + + /projects/{projectId}/branches/{branch}/save: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: 
"#/components/parameters/branch" + post: + tags: + - Branch + summary: Save project branch (commit) + description: Commit project branch to Git + operationId: postProjectsIdBranchesIdSave + requestBody: + description: Publish params + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectSaveParams" + required: true + responses: + "200": + description: Successful execution + content: {} + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/zip: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + get: + tags: + - "[Draft]" + - Branch + summary: "[Draft] Download project branch (zip)" + description: Get the whole project branch as a zip file + operationId: getProjectsIdBranchesIdZip + responses: + "200": + description: Successful execution + content: + application/zip: + schema: + type: string + format: binary + description: file to download + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + BranchNotFound: + $ref: "#/components/examples/BranchNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/refs: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + patch: + tags: + - Branch + summary: Update refs in project branch + 
description: Update refs in project branch + operationId: patchProjectsIdBranchesIdRefs + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Ref" + description: "" + responses: + "200": + description: successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + x-internal: false + /projects/{projectId}/branches/{branch}/integration/files: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + get: + tags: + - Branch + summary: Get GIT branch files/folders list + description: Retrieve the GIT branch files/folders list + operationId: getProjectsIdBranchesIdIntegrationFiles + parameters: + - name: path + in: query + description: Git path + schema: + type: string + - name: onlyAddable + in: query + description: Filter only addable files + schema: + type: boolean + - name: onlyFolders + in: query + description: Filter only folders + schema: + type: boolean + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: successful operation + content: + application/json: + schema: + description: List of content files + type: object + properties: + files: + type: array + items: + $ref: "#/components/schemas/GitBranchFile" + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + post: + tags: + - Branch + summary: Add file to project branch + description: Add content to project branch + operationId: postProjectsIdBranchesIdFiles + requestBody: + content: + application/json: + schema: + type: object + properties: + source: + type: string + enum: + - git + - url + - new + data: + oneOf: + - type: object + properties: + paths: + type: array + description: Git fileId list + items: + type: 
string + - type: object + properties: + url: + type: string + description: File url + path: + type: string + description: Git path + - type: object + properties: + name: + type: string + description: New file name + type: + type: string + enum: + - OpenApi-3-1 + - OpenApi-2 + - custom + description: "" + path: + type: string + responses: + "200": + description: Successful execution + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/rename: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + post: + tags: + - BranchFiles + summary: Rename/move file in project branch + description: Rename/move file in project branch + operationId: postProjectsIdBranchesIdFilesIdRename + requestBody: + content: + application/json: + schema: + type: object + properties: + newFileId: + type: string + description: New fileId + responses: + "200": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/restore: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + post: + tags: + - BranchFiles + summary: Restore deleted file in project branch + description: Restore deleted file in project branch + operationId: postProjectsIdBranchesIdFilesIdRestore + responses: + "200": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + 
description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/meta: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + patch: + tags: + - BranchFiles + summary: Update the file metadata + description: Update the file metadata + operationId: patchProjectsIdBranchesIdFilesIdMeta + parameters: + - name: bulk + description: Bulk operation flag + in: query + schema: + type: boolean + deprecated: true + requestBody: + description: Meta update parameters + content: + application/json: + schema: + type: object + properties: + labels: + type: array + items: + type: string + publish: + type: boolean + responses: + "200": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + get: + tags: + - BranchFiles + summary: Get file data + description: Get project branch file data by fileId + operationId: getProjectsIdBranchesIdFilesId + responses: + "200": + description: Successful operation + content: + text/plain; charset=utf-8: + schema: + type: string + application/json: + schema: + type: object + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: 
"#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + delete: + tags: + - BranchFiles + parameters: + - name: delete + description: Delete file from GIT + schema: + type: boolean + default: false + in: query + required: false + summary: Exclude file from project branch config + description: Exclude file from project branch config. Delete from GIT via parameter + operationId: deleteProjectsIdBranchesIdFilesId + responses: + "200": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + put: + tags: + - BranchFiles + summary: "[temp] Update file data" + description: Update project branch draft file data + operationId: putProjectsIdBranchesIdFilesId + requestBody: + description: Project file update + content: + text/plain; charset=utf-8: + schema: + type: string + responses: + "200": + description: Successful operation + content: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/reset: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + post: + tags: + - BranchFiles + summary: Restore file to last commit + description: Delete file draft changes from project branch (not from git) + operationId: postProjectsIdBranchesIdFilesIdReset + responses: + "200": + description: Successful operation + 
"400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/history: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + get: + tags: + - History + - BranchFiles + summary: Get file changes history + description: Show the content object changes history + operationId: getProjectsIdBranchesIdFilesIdHistory + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + description: History items list + type: object + properties: + changes: + type: array + items: + $ref: "#/components/schemas/ChangeHistory" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/branches/{branch}/files/{fileId}/history/{commitId}: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/branch" + - $ref: "#/components/parameters/fileId" + - $ref: "#/components/parameters/commitId" + get: + tags: + - History + - BranchFiles + summary: Get file version by commit Id + description: Get file data from history by commit id + operationId: getProjectsIdBranchesIdFilesIdHistoryCommitId + responses: + "200": + description: Successful operation + content: + text/plain; charset=utf-8: + schema: + type: string + application/json: + schema: + type: object + "400": + description: Bad request + content: + 
application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/blobs/{blobId}: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/blobId" + get: + tags: + - History + - BranchFiles + summary: Get file content by blob Id + description: Get file data from git by blob id + operationId: getProjectsIdBlobId + responses: + "200": + description: Successful operation + content: + text/plain; charset=utf-8: + schema: + type: string + application/json: + schema: + type: object + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + $ref: "#/components/responses/notFound404" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/versions/{version}: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/version" + get: + tags: + - Versions + summary: Get project version content + description: >- + Get the published package's version content. Returns all content files + and refs. 
+ operationId: getDocsProjectsIdVersionsId + parameters: + - name: dependFiles + description: Include files from depend refs + in: query + schema: + type: boolean + default: false + - name: importFiles + description: Include files from import refs + in: query + schema: + type: boolean + default: false + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectVersionContent" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/versions/{version}/files/{slug}/share: + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + post: + tags: + - Versions + summary: Share published file + description: >- + Create public link for file that can be used to retrieve the file + without security restrictions. The link could be used to embed file + content. 
+ operationId: sharePublishedFile + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/SharedIdResponse" + "400": + $ref: "#/components/responses/badRequest400" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /shared/{sharedId}: + parameters: + - $ref: "#/components/parameters/sharedId" + get: + tags: + - Versions + summary: Get shared file data + description: Get shared file data by share link + operationId: getSharedFileData + security: [{}] + responses: + "200": + description: Successful operation + "400": + $ref: "#/components/responses/badRequest400" + "404": + $ref: "#/components/responses/notFound404" + "410": + $ref: "#/components/responses/gone410" + "500": + $ref: "#/components/responses/internalServerError500" + /system/info: + get: + tags: + - Custom + summary: Get system info + description: Get system info. + operationId: getInfo + parameters: [] + responses: + "200": + description: Successful execution + content: + application/json: + schema: + $ref: "#/components/schemas/SystemInfo" + examples: + SystemInfo: + $ref: "#/components/examples/SystemInfo" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/versions/{version}/files/{slug}/documentation: + get: + tags: + - Export + summary: Export offline API documentation by selected file + description: | + Export of offline API documentation by selected file as a zip archive. + Type of the documentation file is provided as input parameters: + - STATIC/INTERACTIVE - html document. + - PDF - pdf document. 
+ operationId: getProjectsIdVersionsIdFilesSlugDocumentation + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + - name: docType + in: query + description: > + Type of the exported documentation. + + Available variants are: + + * INTERACTIVE (default) - An archive with interactive html + documentation by a selected specification file. + + * STATIC (currently not supported) - An archive with static, lined + vertical, html documentation by a selected specification file. + + * PDF (currently not supported) - A PDF documentation by a selected + specification file. + + * RAW (currently not supported) + required: false + schema: + type: string + enum: + - INTERACTIVE + - STATIC + - PDF + - RAW + default: INTERACTIVE + responses: + "200": + description: Successful execution + content: + application/zip: + schema: + type: string + format: binary + description: Documentation ZIP file to download + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /projects/{projectId}/versions/{version}/documentation: + get: + tags: + - Export + summary: Export offline API documentation by whole version + description: | + Export of offline API documentation as a zip archive. + Type of the documentation file is provided as input parameters: + - STATIC/INTERACTIVE - html document. + - PDF - pdf document. + operationId: getProjectsIdVersionsIdDocumentation + parameters: + - $ref: "#/components/parameters/projectId" + - $ref: "#/components/parameters/version" + - name: docType + in: query + description: > + Type of the exported documentation. 
+ + Available variants are: + + * INTERACTIVE (default) - An archive with interactive html + documentation by a selected specification file. + + * STATIC (currently not supported) - An archive with static, lined + vertical, html documentation by a selected specification file. + + * PDF (currently not supported) - A PDF documentation by a selected + specification file. + + * RAW (currently not supported) + required: false + schema: + type: string + enum: + - INTERACTIVE + - STATIC + - PDF + - RAW + default: INTERACTIVE + responses: + "200": + description: Successful execution + content: + application/zip: + schema: + type: string + format: binary + description: Documentation ZIP file to download + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/responses/notFound404" + examples: + ProjectNotFound: + $ref: "#/components/examples/ProjectNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + $ref: "#/components/responses/internalServerError500" + /api/v1/projects/{projectId}/integration/hooks: + put: + tags: + - Integrations + summary: Sets secret token for gitlab webhook integration for project + description: Sets secret token for gitlab webhook integration + operationId: setGitLabToken + parameters: + - $ref: "#/components/parameters/projectId" + requestBody: + description: Secret token + content: + application/json: + schema: + type: object + properties: + secretToken: + type: string + responses: + "200": + description: Successful + content: {} + "500": + $ref: "#/components/responses/internalServerError500" + /api/v1/git/webhook: + post: + tags: + - Integrations + summary: Handle gitlab event + description: Handle gitlab event + operationId: handleGitlabEvent + requestBody: + description: Push or Tag event https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html + content: + application/json: + schema: + type: 
object + responses: + "200": + description: Successful + content: + application/json: + schema: + type: array + items: + type: object + properties: + publishId: + type: string + description: Publish process Id + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + "500": + $ref: "#/components/responses/internalServerError500" +components: + parameters: + integrationType: + name: integrationType + schema: + type: string + description: Integration type + in: path + required: true + groupId: + name: groupId + in: path + description: Group unique identifier (full alias) + required: true + schema: + type: string + example: QS.CQSS + projectId: + name: projectId + in: path + description: Project unique identifier (full alias) + required: true + schema: + type: string + example: QS.CQSS.CPQ.TMF + branch: + name: branch + in: path + description: Git branch name (URL encoded) + required: true + schema: + type: string + format: URLencoded + fileId: + name: fileId + in: path + description: File unique string identifier (GIT file path) + required: true + schema: + type: string + commitId: + name: commitId + in: path + description: Commit unique string identifier (GIT commit) + required: true + schema: + type: string + blobId: + name: blobId + in: path + description: Git blob ID of the file + required: true + schema: + type: string + version: + name: version + in: path + description: Project version + required: true + schema: + type: string + example: v1.2 + slug: + name: slug + in: path + description: File unique string identifier + required: true + schema: + type: string + limit: + name: limit + in: query + description: items per page + schema: + type: number + default: 100 + maximum: 100 + minimum: 1 + page: + name: page + in: query + description: page number + schema: + type: number + default: 0 + filter: + name: filter + in: query + description: filter by name + schema: + type: string + sharedId: + name: sharedId + in: path + description: Shared object id + 
required: true + schema: + type: string + example: ebbcce45 + schemas: + AuthResponse: + description: Auth response + type: object + properties: + token: + description: Bearer token + type: string + user: + $ref: "#/components/schemas/User" + required: + - token + GitlabApiKeyIntegrationStatus: + description: Gitlab ApiKey integration status + type: object + properties: + status: + description: Integration status + type: string + required: + - status + GitlabApiKeyIntegrationData: + description: Gitlab ApiKey integration params + type: object + properties: + apikey: + description: Gitlab user apikey + type: string + required: + - apikey + GroupCreate: + description: Project group/organization creation params + type: object + properties: + alias: + description: Group alias (abbreviation) + type: string + name: + description: Group full name + type: string + parentId: + description: Parent group Id + type: string + imageUrl: + type: string + format: URL + description: + type: string + description: Group description + required: + - alias + - name + Group: + description: Project group/organization + type: object + title: "" + required: + - groupId + - alias + - name + properties: + groupId: + description: Internal unique group ID (full alias) + type: string + alias: + description: Group short alias + type: string + name: + description: Group display name + type: string + parentId: + description: Parent groupId (full alias) + type: string + example: QS.CQSS + description: + type: string + imageUrl: + type: string + format: URL + isFavorite: + description: Sign of the favorite project for the caller user + type: boolean + lastVersion: + type: string + GroupInfo: + description: Project group/organization info + type: object + title: "" + properties: + parents: + description: Parent groups list + type: array + items: + $ref: "#/components/schemas/Group" + alias: + description: Group short alias (abbreviation) + type: string + deprecated: true + name: + description: Group 
full name + type: string + imageUrl: + type: string + format: URL + description: + type: string + groupId: + type: string + parentId: + type: string + isFavorite: + type: boolean + lastVersion: + type: string + Repository: + description: Linked GIT repository params + type: object + properties: + repositoryId: + description: External GIT repository id + type: string + name: + description: External GIT repository full name + type: string + defaultBranch: + description: Name of the default branch of the linked repository + type: string + ProjectStatus: + description: Published project version content + type: object + properties: + status: + type: string + enum: + - exists + - deleted + ProjectCreate: + description: Parameters for the artifact creation + required: + - groupId + - alias + - name + - repositoryId + - defaultBranch + - defaultFolder + type: object + properties: + groupId: + description: Internal unique group ID + type: string + name: + description: Name of the new project + type: string + alias: + description: Project short alias (abbreviation) + type: string + description: + description: Common description of the project + type: string + integration: + type: object + properties: + type: + type: string + enum: + - gitlab + default: gitlab + repositoryId: + description: External Git repository id + type: string + defaultBranch: + description: Name of the default branch of the linked repository + type: string + defaultFolder: + description: Name of the default folder of the linked branch + type: string + Project: + description: Simple project object, without content and dependencies + type: object + properties: + projectId: + description: Project unique string identifier (full alias) + type: string + alias: + type: string + description: Project short alias + groupId: + description: Parent GroupId + type: string + groups: + description: Project groups list + type: array + items: + $ref: "#/components/schemas/Group" + name: + description: Name of the new 
project + type: string + description: + description: Common description of the project + type: string + integration: + type: object + properties: + type: + type: string + enum: + - gitlab + default: gitlab + repositoryId: + description: External Git repository id + type: string + defaultBranch: + description: Name of the default branch of the linked repository + type: string + defaultFolder: + description: Name of the default folder of the linked branch + type: string + isFavorite: + description: Sign of the favorite project for the caller user + type: boolean + lastVersion: + description: Last published version + type: string + ProjectUpdate: + description: >- + Parameters for the project update. Not changed parameters must not be + transmitted. Parameters, required in creation, must not be empty if + transmitted. + type: object + properties: + groupId: + description: Internal unique group ID + type: string + name: + description: Name of the new project + type: string + alias: + description: Project alias + type: string + description: + description: Common description of the project + type: string + repositoryId: + description: External repository project ID + type: string + defaultbranch: + description: Name of the default branch of the linked repository + type: string + GitBranch: + description: External Git branch params + type: object + properties: + name: + description: Branch name + type: string + GitBranchFile: + description: Git branch file + type: object + properties: + name: + description: item name + type: string + isFolder: + description: Is item is folder + type: boolean + default: false + ProjectBranch: + description: External Git branch params + type: object + properties: + name: + description: Branch name + type: string + version: + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + publishedAt: + type: string + format: date-time + permissions: + type: array + description: | + "all" value will be returned if user has 
permission to push to the current branch. + items: + type: string + enum: + - all + default: [] + required: + - name + - permissions + ChangeAction: + description: File/ref change action + type: string + deprecated: true + enum: + - add + - remove + - modify + - rename + ProjectContent: + title: ProjectContent + description: Schema for project content + type: object + properties: + readonly: + description: Is project branch readonly for user + type: boolean + default: false + isDraft: + type: boolean + description: >- + The flag indicates that the project is in draft and cannot be + published + isPublished: + type: boolean + description: The flag indicates that the project has any published version or not + editors: + description: "[Draft] List of users who editing branch as draft" + type: array + items: + $ref: "#/components/schemas/User" + configFileId: + type: string + changeType: + type: string + default: none + enum: + - added + - updated + - none + files: + description: "[before Contents]" + type: array + items: + type: object + properties: + fileId: + description: File Id (GIT file path) + type: string + blobId: + type: string + description: Git blob ID of the file + action: + $ref: "#/components/schemas/ChangeAction" + isDraft: + description: File updated and not saved (committed) + type: boolean + deprecated: true + type: + $ref: "#/components/schemas/ShortcutTypeEnum" + name: + description: File name + type: string + path: + description: GIT path to file + type: string + publish: + description: Is file for publishing + type: boolean + labels: + description: List of file labels + type: array + items: + type: string + status: + description: File status + type: string + default: unmodified + enum: + - moved + - modified + - excluded + - deleted + - added + - included + - unmodified + changeType: + description: Git + type: string + default: none + enum: + - none + - added + - removed + - updated + movedFrom: + description: Original file Id + type: string + 
conflictedBlobId: + type: string + conflictedFileId: + type: string + required: + - name + - path + - fileId + - status + refs: + type: array + items: + $ref: "#/components/schemas/Ref" + Ref: + type: object + properties: + refId: + description: project id (full alias) + type: string + status: + description: File status + type: string + default: unmodified + enum: + - modified + - deleted + - added + - unmodified + action: + $ref: "#/components/schemas/ChangeAction" + kind: + description: reference kind (group or project) + enum: + - group + - project + type: string + name: + description: Project name + type: string + version: + description: Published version name + type: string + type: + type: string + description: Relation type + enum: + - depend + - import + versionStatus: + $ref: "#/components/schemas/VersionStatusEnum" + BranchConfig: + title: BranchConfig + description: Schema for config file stored in git + type: object + properties: + projectId: + type: string + files: + description: "" + type: array + items: + type: object + properties: + fileId: + description: File Id (GIT file path) + type: string + publish: + type: boolean + blobId: + type: string + labels: + type: array + items: + type: string + status: + type: string + enum: + - added + - removed + - modified + - replaced + - none + required: + - fileId + refs: + type: array + items: + type: object + properties: + refId: + description: project id (full alias) + type: string + version: + description: Published version name + type: string + type: + type: string + description: Relation type + enum: + - depend + - import + ProjectBranchConfigParams: + title: ProjectBranchConfigParams + description: Schema for project branch update body (config file stored in git) + type: object + properties: + files: + type: array + items: + type: object + properties: + name: + description: Shortcut name + type: string + path: + description: Virtual path to folder/shortcut + type: string + fileId: + description: File if 
(Git file path) + type: string + format: URL + required: + - name + - path + - fileId + refs: + type: array + items: + $ref: "#/components/schemas/RefParam" + RefParam: + type: object + required: + - refId + - version + properties: + refId: + description: project full alias + type: string + type: + type: string + description: Relation type + enum: + - depend + - import + default: depend + version: + description: Published version name + type: string + ShortcutTypeEnum: + description: Enum list of project content types + type: string + enum: + - API/OPENAPI-3 + - API/OPENAPI-3-1 + - API/ASYNCAPI + - API/gRPC + - API/GQL + - API/AVRO + - SECURITY_MATRIX + - ERR_CODES + - DEFINITIONS + - MD + - PICTURE + VersionStatusEnum: + description: Enum list of project status + type: string + enum: + - draft + - release + - deprecated + - archived + ProjectSaveParams: + description: Project save params + type: object + properties: + comment: + description: Git commit comment + type: string + branch: + description: "[Draft] New branch name" + type: string + createMergeRequest: + description: "[Draft] Create Merge request for new branch" + type: boolean + default: false + required: + - comment + ProjectPublishParams: + description: Project publish params + type: object + properties: + version: + description: Version name + type: string + previousVersion: + description: Previous release version + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + serviceName: + deprecated: true + type: string + description: Runtime serviceName + saveSources: + type: boolean + default: false + PackagePublishParams: + description: Package publish params + type: object + properties: + version: + description: Version name + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + refs: + type: array + items: + type: string + GroupPublishParams: + description: Package publish params + type: object + properties: + version: + description: Version name + type: string 
+ previousVersion: + deprecated: true + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + refs: + type: array + items: + $ref: "#/components/schemas/RefParam" + ChangeHistory: + description: History item of the content changes or publications + type: object + properties: + commitId: + type: string + description: | + **[Draft]** Integration commit id + modifiedBy: + $ref: "#/components/schemas/User" + modifiedAt: + description: Date of modification + type: string + format: date-time + comment: + description: Commit comment + type: string + User: + description: User dictionary + type: object + properties: + id: + description: Login of the user + type: string + name: + description: Name of the user + type: string + avatarUrl: + description: Avatar of the user + type: string + format: URL + ProjectApiKey: + type: object + properties: + id: + type: string + format: uuid + projectId: + type: string + name: + type: string + createdBy: + type: string + format: datetime + createdAt: + type: string + format: datetime + ProjectVersion: + description: Item of the project versions list + type: object + properties: + version: + description: Project version number + type: string + previousVersion: + type: string + previousVersionPackageId: + type: string + serviceName: + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + publishedAt: + description: Date of the project version publication + type: string + format: date-time + revision: + type: number + description: Publish revision counter + ProjectVersionContent: + description: Published project version content + type: object + required: + - status + - status + - refs + - files + properties: + status: + $ref: "#/components/schemas/VersionStatusEnum" + previousVersion: + type: string + previousVersionPackageId: + type: string + serviceName: + type: string + publishedAt: + description: Date of the project version publication + type: string + format: date-time + refs: + type: array + 
items: + allOf: + - $ref: "#/components/schemas/ProjectVersionRef" + - type: object + properties: + refId: + description: "[Draft] Regs from Import References with Id" + type: string + files: + description: List of the project content objects + type: array + items: + allOf: + - $ref: "#/components/schemas/ProjectVersionFile" + - type: object + properties: + refId: + description: "[Draft] File from Reference with Id" + type: string + changes: + type: object + properties: + summary: + type: object + description: Numbers of all types of changes + data: + type: array + items: + type: object + description: Here will be paths, endpoints, severity and other params. + validations: + description: Map of validation results + type: object + additionalProperties: + type: object + properties: + summary: + type: object + description: Numbers of all types of changes + data: + type: array + items: + type: object + description: Here will be paths, endpoints, severity and other params. + ProjectVersionFile: + description: API content object parameters for the list function + type: object + properties: + slug: + description: published file slug + type: string + fileId: + type: string + title: + type: string + type: + $ref: "#/components/schemas/ShortcutTypeEnum" + description: + description: Content item description + type: string + ProjectVersionRef: + description: Project version reference + type: object + properties: + name: + description: Name of the new project + type: string + refId: + description: Project/group Id + type: string + kind: + description: reference kind (group or project) + enum: + - group + - project + type: string + type: + type: string + description: Relation type + enum: + - depend + - import + version: + description: Project version number + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + DependentProject: + description: Parameters of the dependent project + type: object + properties: + projectId: + description: ID of the dependent 
project + type: string + name: + type: string + description: Project name + groups: + type: array + items: + $ref: "#/components/schemas/Group" + alias: + description: Project alias + type: string + version: + description: Version of the dependent project + type: string + status: + $ref: "#/components/schemas/VersionStatusEnum" + FilterParams: + description: Common filter params object + type: object + properties: + searchString: + description: Search by common text fields (summary, description, title, etc.) + type: string + organizationCode: + description: Code of the organization from the dictionary + type: string + projectId: + description: Project unique string identifier + type: string + projectStateCode: + description: Code of project state from the dictionary + type: string + projectVersion: + description: Number of the project version. Wildcard is acceptable + type: string + artifactVersion: + description: Number of the Content object version. Wildcard is acceptable + type: string + publicationDateStart: + description: Publication date range start + type: string + format: date-time + publicationDateEnd: + description: Publication date range end + type: string + format: date-time + openAPIParams: + $ref: "#/components/schemas/OpenAPIParamsFilter" + VersionsDiff: + description: Two versions diff + type: object + properties: {} + SharedIdResponse: + description: Shared link id + type: object + properties: + sharedId: + type: string + CompareAction: + type: string + enum: + - add + - delete + - change + - none + default: none + ChangesDetails: + type: object + properties: + breaking: + type: number + nonBreaking: + type: number + annotation: + type: number + unclassified: + type: number + VersionCompareData: + description: Two versions diff + type: object + properties: + files: + type: array + items: + description: Project version file diff + type: object + properties: + before: + $ref: "#/components/schemas/ProjectVersionFile" + after: + $ref: 
"#/components/schemas/ProjectVersionFile" + action: + $ref: "#/components/schemas/CompareAction" + changes: + $ref: "#/components/schemas/ChangesDetails" + refs: + type: array + items: + description: Project version reference diff + type: object + properties: + before: + $ref: "#/components/schemas/ProjectVersionRef" + after: + $ref: "#/components/schemas/ProjectVersionRef" + action: + $ref: "#/components/schemas/CompareAction" + OpenAPIParamsFilter: + description: Filter object for OpenAPI object type search + type: object + properties: + operationId: + description: Search by operationId. Wildcard is acceptable + type: string + methods: + description: List of available methods for search + type: array + items: + type: string + resourcePath: + description: Part of the resource path of the API. Wildcard is acceptable + type: string + tags: + description: List of tags, used in specification (field -Tag) + type: array + items: + type: string + definition: + description: Search by API definitions. Wildcard is acceptable + type: string + APItype: + $ref: "#/components/schemas/ShortcutTypeEnum" + ErrorResponse: + description: An error description + type: object + properties: + status: + description: HTTP Status Code + type: number + code: + description: Internal string error code. Mandatory in response. + type: string + x-nc-error-codes: {} + message: + description: The attribute contains an error message. + type: string + params: + type: object + description: Message parameters + example: + id: 12345 + type: string + debug: + description: >- + The attribute contains debug details (e.g. stack-trace). Presented + in the error response only on Dev/Test environments if corresponding + logging level is enabled. 
+ type: string + required: + - status + - code + - message + SystemInfo: + description: Information about the API HUB product + type: object + properties: + backendVersion: + description: Current backend version + type: string + example: main-20220727.092034-53 + frontendVersion: + description: Current frontend version + type: string + example: 0.2.2-20220725071210 + productionMode: + description: Production environment flag + type: boolean + externalLinks: + description: List of links to the external resource, for example URL to the documentation or URL pointing to the contact information of support group. + type: array + items: + description: | + Title for the link and corresponding URL. Value must be written in the following format: title|URL. If array item does not contain vertical bar (|), then this item will be skipped. + type: string + example: [User guide|https://www.example.com/guide, Support team|https://www.example.com/support] + ValidationParams: + description: qubership product release custom API validation parameters + properties: + groupName: + description: >- + Name of the group for validation. The release NC product name may be + used. + type: string + example: Quotation Management + version: + description: >- + The group release number. Must be equal to the published project + version in APIHUB. + type: string + example: "22.1" + required: + - groupName + - version + ValidationReport: + description: | + qubership product release custom API validation report. + properties: + validationId: + description: Validation unique string identifier + type: string + format: uuid + example: 31adff72-3a66-4c7c-8997-aeca45e0bcbe + groupName: + description: >- + Name of the group for validation. The release NC product name may be + used. 
+ type: string + example: Quotation Management + status: + description: The whole validation status + type: string + enum: + - "NO" + - "YES" + - SEMI + - IN_PROGRESS + projects: + description: >- + Collection of all nested projects in Group with their validation + results. + type: array + items: + $ref: "#/components/schemas/ValidationProject" + ValidationProject: + description: Details of project validation + properties: + projectId: + description: Project unique identifier (full alias) + type: string + example: QS.CloudQSS.CPQ.Q-TMF + projectName: + description: Project name + type: string + example: Quote TMF + status: + description: The project validation status + type: string + enum: + - "NO" + - "YES" + - SEMI + files: + description: >- + Collection of all nested files in Project with their validation + results. + type: array + items: + $ref: "#/components/schemas/ValidationFile" + ValidationFile: + description: Details of particular file validation + properties: + slug: + description: File public slug + type: string + example: qitmf-v5.9.json + status: + description: The file validation status + type: string + enum: + - "NO" + - "YES" + - SEMI + messages: + description: Collection of all messages in file validation. 
+ type: array + items: + $ref: "#/components/schemas/ValidationFileMessage" + ValidationFileMessage: + description: Validation message details + properties: + type: + description: The message type + type: string + enum: + - ERROR + - WARNING + - INFORMATION + path: + description: Relative path to the error/warning point in file + type: string + nullable: true + default: "" + text: + description: Validation error/warning message + type: string + example: Failed to get validation messages + examples: + FilterParams: + description: Example of the filter params + value: + searchString: "*any description*" + organizationCode: qubership + projectId: f81d4fae-7dec-11d0-a765-00a0c91e6bf6 + projectStateCode: Develop + projectVersion: v1-2 + artifactVersion: v2.1 + publicationDateStart: "2021-11-21T23:59:59.000Z" + publicationDateEnd: "2021-12-31T23:59:59.000Z" + openAPIParams: + operationId: "*offering*" + methods: + - GET + - POST + resourcePath: /catalogManagement/v2/productOffering + tags: + - ProductOffering + definition: "*offering*" + APItype: API/OPENAPI-3 + Project: + description: Example of the project params + value: + groupId: 957c5116-aeaa-400b-962e-37a7616c3099 + projectId: f81d4fae-7dec-11d0-a765-00a0c91e6bf6 + name: Test project + alias: Test project alias + description: Project for the test purpose + defaultBranch: develop + isFavorite: false + ProjectCreate: + description: Example of the project creation params + value: + groupId: QS.QSS.PRG + name: Test project + alias: PRG + description: Project for the test purpose + repositoryId: "112233" + defaultBranch: develop + defaultFolder: /apihub + SystemInfo: + description: Example of the system description + value: + version: 0.1.0 + DOC: https://doc.qubership.org/pages/viewpage.action?pageId=1114318293 + API: https://doc.qubership.org/display/SAAS/Admin+API+List + contacts: + - name: Admin + contact: admin@qubership.org + BranchNotFound: + description: Branch not found by ID. 
Response for the 404 error + value: + status: 404 + code: "0001" + source: + pointer: $.path.branch + parameter: branch + reason: Branch not found + message: Incorrect ID or branch not found + ProjectNotFound: + description: Project not found by ID. Response for the 404 error + value: + status: 404 + code: "0005" + source: + pointer: $.path.projectId + parameter: projectId + reason: Project not found + message: Incorrect ID or project not found + VersionNotFound: + description: Version not found by number. Response for the 404 error + value: + status: 404 + code: "0002" + source: + pointer: $.path.version + parameter: version + reason: Version not found + message: Incorrect number or version not found + FileNotFound: + description: File not found by slug. Response for the 404 error + value: + status: 404 + code: "0003" + source: + pointer: $.path.file + parameter: slug + reason: File not found + message: Incorrect slug or file not found + securitySchemes: + BearerAuth: + type: http + description: Common security scheme for API usage + scheme: bearer + bearerFormat: JWT + api-key: + type: apiKey + description: Api-key authentication + name: api-key + in: header + BasicAuth: + type: http + description: Login/password authentication + scheme: basic + responses: + badRequest400: + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + notFound404: + description: Not found or incorrect ID + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + gone410: + description: >- + Gone. Indicates that the resource requested was previously in use but is + no longer available and will not be available again. This should be used + when a resource has been intentionally removed and the resource should + be purged. Upon receiving a 410 status code, the client should not + request the resource in the future. Clients such as search engines + should remove the resource from their indices. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + internalServerError500: + description: Internal server error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" +security: + - BearerAuth: [] + - api-key: [] diff --git a/docs/api/Admin API.yaml b/docs/api/Admin API.yaml new file mode 100644 index 0000000..081e169 --- /dev/null +++ b/docs/api/Admin API.yaml @@ -0,0 +1,374 @@ +openapi: 3.0.3 +info: + title: APIHUB system administrators API + description: | + The API is for system administrators only. + license: + name: qubership + url: https://qubership.org + version: "2024.2" + x-nc-api-audience: BWC +externalDocs: + description: Find out more about package + url: https://qubership.org/APIHUB +servers: + - url: https://{apihub}.qubership.org + description: APIHUB server + variables: + apihub: + description: Name of the APIHUB server. + enum: + - apihub + - dev.apihub + - staging.apihub + default: apihub +security: + - BearerAuth: [] +tags: + - name: Transition + description: Operations to move packages + +paths: + "/api/v2/admin/transition/move": + post: + tags: + - Transition + summary: Move package + description: Change package id, i.e. rename it or change parent. Async operation, result is returned by id. + operationId: movePackage + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: Package coordinates + content: + application/json: + schema: + type: object + properties: + from: + type: string + description: package id that needs to be moved + to: + type: string + description: destination package id + overwriteHistory: + type: boolean + description: Enable force move for already used 'old' package id(which now redirects to some new one). In this case existing transition record will be lost and there would be no more redirect. 
+ examples: {} + required: true + + responses: + "200": + description: Success + content: + application/json: + schema: + description: Move package response + type: object + properties: + id: + description: Move process id + type: string + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found or incorrect 'from' ID + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + + /api/v2/admin/transition/move/{id}: + get: + tags: + - Transition + summary: Get move status + description: | + Get status of the move operation by id. 
+ operationId: getMoveStatus + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: id + description: Move operation id + in: path + required: true + schema: + type: string + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/TransitionStatus" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found or incorrect ID + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + + /api/v2/admin/transition/activity: + get: + tags: + - Transition + summary: List completed transition activities + description: | + List completed transition activities + operationId: listActivities + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: offset + in: query + description: Transition activities offset + schema: + type: number + default: 0 + - name: limit + in: query + description: Maximum items in response + schema: + type: number + default: 100 + maximum: 100 + minimum: 1 + responses: + "200": + description: Success + content: + application/json: + schema: + description: Transition activities list + type: object + properties: + changes: + type: array + items: + $ref: "#/components/schemas/TransitionStatus" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: 
Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/admin/transition: + get: + tags: + - Transition + summary: List transitions + description: | + List full transition mapping + operationId: listPackageTransitions + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + description: Transition activities list + type: object + properties: + changes: + type: array + items: + type: object + properties: + oldPackageId: + description: Package id that was before transition + type: string + newPackageId: + description: New package id after transition + type: string + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" +components: + schemas: + ErrorResponse: + description: An error description + type: object + properties: + status: + description: HTTP Status Code + type: number + code: + description: Internal string error code. Mandatory in response. + type: string + message: + description: The attribute contains an error message. + type: string + params: + type: object + description: Message parameters + example: + id: 12345 + type: string + debug: + description: The attribute contains debug details (e.g. stack-trace). Presented in the error response only on Dev/Test environments if corresponding logging level is enabled. 
+ type: string + required: + - status + - code + - message + TransitionStatus: + type: object + properties: + id: + type: string + description: Move operation id + trType: + type: string + description: Transition type + fromId: + type: string + description: Source package id + toId: + type: string + description: Destination package id + status: + type: string + description: Operation status + startedBy: + type: string + description: User who started the operation + startedAt: + type: string + description: Time when the operation was started + finishedAt: + type: string + description: Time when the operation was finished + progressPercent: + type: integer + format: int32 + description: Percent of complete. Values 0-100 + affectedObjects: + type: integer + format: int32 + description: Number of affected rows in DB + completedSerialNumber: + type: integer + format: int32 + description: Serial number of completed transition + examples: + IncorrectInputParameters: + description: Incorrect input parameters + value: + status: 400 + code: "APIHUB-COMMON-4001" + message: "Incorrect input parameters" + InternalServerError: + description: Default internal server error + value: + status: 500 + code: "APIHUB-8000" + reason: "InternalServerError" + message: "InternalServerError" + securitySchemes: + BearerAuth: + type: http + description: Bearer token authentication. Default security scheme for API usage. + scheme: bearer + bearerFormat: JWT + api-key: + type: apiKey + description: Api-key authentication. 
+ name: api-key + in: header \ No newline at end of file diff --git a/docs/api/Public Registry API.yaml b/docs/api/Public Registry API.yaml new file mode 100644 index 0000000..a515a15 --- /dev/null +++ b/docs/api/Public Registry API.yaml @@ -0,0 +1,16080 @@ +openapi: 3.0.3 +info: + title: APIHUB registry public API + description: | + The API contract for APIHUB direct call + license: + name: qubership + url: https://qubership.org + version: "2024.2" + x-nc-api-audience: BWC +externalDocs: + description: Find out more about package + url: https://qubership.org/APIHUB +servers: + - url: https://{apihub}.qubership.org + description: APIHUB server + variables: + apihub: + description: Name of the APIHUB server. + enum: + - apihub + - dev.apihub + - staging.apihub + default: apihub +security: + - BearerAuth: [] +tags: + - name: Auth + description: APIs for auth integrations. + - name: Packages + description: APIs for the package management. + - name: Publish + description: Publish version API + - name: Versions + description: Published package versions API. + - name: Export + description: Export API documentation. + - name: Users + description: APIs for the user operations. + - name: Search + description: Search functions. + - name: Admin + description: APIs for technical administration. + - name: Agent + description: APIHUB Agent API documentation. + - name: Integrations + description: APIs for git integrations. + - name: Roles + description: APIs for role management. + - name: Operations + description: Operations APIs. + - name: Documents + description: Documents APIs. + - name: Changes + description: Changes APIs. + - name: TryIt + description: API for 'try it' functionality + - name: Operation groups + description: Operation groups +paths: + "/api/v1/system/configuration": + get: + tags: + - Auth + summary: System configuration + description: Global parameters of system configuration. 
+ operationId: getSystemConfiguration + security: + - {} + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + required: + - autoRedirect + properties: + ssoIntegrationEnabled: + type: boolean + autoRedirect: + type: boolean + defaultWorkspaceId: + description: Id of the workspace, which is used by default while working with the system. + type: string + example: WSPACE1 + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/auth/saml": + post: + x-nc-api-audience: noBWC + tags: + - Auth + summary: SAML authentication + description: | + Starts the SAML authentication process in APIHUB. + + In case of successful authentication, the request will be redirected to the **redirectUri** and + the response will contain cookie with access token for future API calls. + All subsequent APIHUB calls must use this token in a **BearerAuth** authentication. + operationId: postAuthSAML + security: [{}] + parameters: + - name: redirectUri + in: query + description: URI, where user must be redirected in case of successful APIHUB authentication. + required: true + schema: + type: string + format: uri + example: "https://apihub.qubership.org/portal" + responses: + "302": + description: Moved Temporarily + headers: + Set-Cookie: + description: A base64 encoded userView cookie, containing the user data and access token. 
+ schema: + type: string + example: "userView=eyJ0b2tlbiI6ImV5SmhiR2NpT2lKSVV6STFOaUlzSW10cFpDSTZJbk5sWTNKbGRDMXBaQ0lzSW5SNWNDSTZJa3BYVkNKOS5leUpGZUhSbGJuTnBiMjV6SWpwN2ZTd2lSM0p2ZFhCeklqcGJYU3dpU1VRaU9pSmxkbVZzTURJeE9TSXNJazVoYldVaU9pSmxkbVZzTURJeE9TSXNJbUYxWkNJNld5SWlYU3dpWlhod0lqb3hOamMwTURZNU1UVTRMQ0pwWVhRaU9qRTJOelF3TWpVNU5UZ3NJbTVpWmlJNk1UWTNOREF5TlRrMU9Dd2ljM1ZpSWpvaVpYWmxiREF5TVRraWZRLk45d2poeGhLRkoyTlEzNXpCaGw0VEs4VFBOS3RoeDE5czNQQnNheTNkclEiLCJyZW5ld1Rva2VuIjoiZXlKaGJHY2lPaUpJVXpJMU5pSXNJbXRwWkNJNkluTmxZM0psZEMxcFpDSXNJblI1Y0NJNklrcFhWQ0o5LmV5SkZlSFJsYm5OcGIyNXpJanA3ZlN3aVIzSnZkWEJ6SWpwYlhTd2lTVVFpT2lKbGRtVnNNREl4T1NJc0lrNWhiV1VpT2lKbGRtVnNNREl4T1NJc0ltRjFaQ0k2V3lJaVhTd2laWGh3SWpveE5qYzJOakUzT1RVNExDSnBZWFFpT2pFMk56UXdNalU1TlRnc0ltNWlaaUk2TVRZM05EQXlOVGsxT0N3aWMzVmlJam9pWlhabGJEQXlNVGtpZlEuUnRyZEEwNkJrN2lIQnA1bVRYUE1PSnVJdmhtQ0FudHRnVlBqNGZicXN6WSIsInVzZXIiOnsiaWQiOiJldmVsMDIxOSIsImVtYWlsIjoiZXZnZW5paS5lbGl6YXJvdkBuZXRjcmFja2VyLmNvbSIsIm5hbWUiOiJFdmdlbmlpIEVsaXphcm92IiwiYXZhdGFyVXJsIjoiL2FwaS92Mi91c2Vycy9ldmVsMDIxOS9wcm9maWxlL2F2YXRhciJ9fQ==;" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/activity: + get: + tags: + - Packages + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/activity + summary: Get activity history + description: | + Get activity history. Return the last N events in descending date order. 
+ operationId: getActivity + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: onlyFavorite + description: | + * If true, then only events from packages that are favorites for the current user shall be returned (including all child groups/packages/dashboards for favorite groups/workspaces) + * If false, events for all packages shall be returned. + in: query + schema: + type: boolean + default: false + - name: onlyShared + in: query + description: filter only shared packages + schema: + type: boolean + default: false + - name: kind + in: query + description: | + Filter by package kind. + The list of values is acceptable. In this case, the following pattern will be used: ```?kind=group,package,dashboard```. + schema: + type: array + items: + type: string + enum: + - workspace + - group + - package + - dashboard + example: [ group, package, dashboard ] + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision. + * package_management - create_package, delete_package, patch_package_meta. 
+ in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - userId + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + userName: + description: Name of the user who generated an event + type: string + example: "Name Surname" + userId: + description: Login of the user who generated an event + type: string + example: user1221 + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - generate_api_key + - revoke_api_key + - create_package + - delete_package + - grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + roles: + type: array + items: + $ref: 
"#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - revision + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + revision: + description: Number of the revision. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name. 
+ type: string + example: "22.3" + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v3/activity: + get: + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v4/activity + tags: + - Packages + summary: Get activity history + description: | + Get activity history. Return the last N events in descending date order. + operationId: getActivityV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: onlyFavorite + description: | + * If true, then only events from packages that are favorites for the current user shall be returned (including all child groups/packages/dashboards for favorite groups/workspaces) + * If false, events for all packages shall be returned. 
+ in: query + schema: + type: boolean + default: false + - name: onlyShared + in: query + description: filter only shared packages + schema: + type: boolean + default: false + - name: kind + in: query + description: | + Filter by package kind. + The list of values is acceptable. In this case, the following pattern will be used: ```?kind=group,package,dashboard```. + schema: + type: array + items: + type: string + enum: + - workspace + - group + - package + - dashboard + example: [ group, package, dashboard ] + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision. + * package_management - create_package, delete_package, patch_package_meta. + in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + principal: + $ref: "#/components/schemas/Principal" + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - 
generate_api_key + - revoke_api_key + - create_package + - delete_package + - grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + roles: + type: array + items: + $ref: "#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - revision + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + revision: + description: Number of the revision. 
+ type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name. + type: string + example: "22.3" + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v4/activity: + get: + tags: + - Packages + summary: Get activity history + description: | + Get activity history. Return the last N events in descending date order. 
+ operationId: getActivityV4 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: onlyFavorite + description: | + * If true, then only events from packages that are favorites for the current user shall be returned (including all child groups/packages/dashboards for favorite groups/workspaces) + * If false, events for all packages shall be returned. + in: query + schema: + type: boolean + default: false + - name: onlyShared + in: query + description: filter only shared packages + schema: + type: boolean + default: false + - name: kind + in: query + description: | + Filter by package kind. + The list of values is acceptable. In this case, the following pattern will be used: ```?kind=group,package,dashboard```. + schema: + type: array + items: + type: string + enum: + - workspace + - group + - package + - dashboard + example: [ group, package, dashboard ] + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision. + * package_management - create_package, delete_package, patch_package_meta. 
+ * operations_group - create_manual_group, delete_manual_group, update_operations_group_parameters + in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - operations_group + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + principal: + $ref: "#/components/schemas/Principal" + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - generate_api_key + - revoke_api_key + - create_package + - delete_package + - grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + - create_manual_group + - delete_manual_group + - update_operations_group_parameters + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + 
roles: + type: array + items: + $ref: "#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - revision + - status + properties: + version: + description: Package version name. + type: string + example: "22.3" + revision: + description: Number of the revision. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name. 
+ type: string + example: "22.3" + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + - type: object + title: ParamsForPostDeleteManualGroups + description: | + params for the following events: + * create_manual_group + * delete_manual_group + required: + - version + - groupName + - apiType + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + description: If parameter is not returned, then it is latest revision. + groupName: + description: Manual group name + type: string + apiType: + type: string + enum: + - rest + - graphql + - type: object + title: ParamsForPatchOperationsGroup + description: | + params for the update_operations_group_parameters event + required: + - version + - groupName + - groupsParams + - isPrefixGroup + - apiType + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + apiType: + type: string + enum: + - rest + - graphql + notLatestRevision: + description: If parameter is not returned, then it is latest revision.
+ type: boolean + default: false + groupName: + description: Manual group name + type: string + groupsParams: + description: List of parameters that was updated in group + type: array + items: + type: string + enum: + - name + - description + - template + - operations + isPrefixGroup: + type: boolean + description: true - if the group created automatically via restGroupingPrefix. + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages": + post: + tags: + - Packages + - Admin + summary: Create a new package + description: Create a new package in APIHUB registry. 
+ operationId: postPackages + requestBody: + description: Package for creation + content: + application/json: + schema: + $ref: "#/components/schemas/PackageCreate" + examples: {} + required: true + responses: + "201": + description: Created + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Package" + - type: object + properties: + parents: + description: List of all parent packages + type: array + items: + $ref: "#/components/schemas/PackageList" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + tags: + - Packages + summary: Get packages list + description: Retrieve the packages list. + operationId: getPackages + parameters: + - name: parentId + in: query + description: Filter by the parent package Id. + schema: + type: string + example: QS.CQSS.CPQ + - name: kind + in: query + description: | + Filter the packages by kind. + + The list of values is acceptable. In this case, the following pattern will be used: ```?kind=group,package,dashboard```. + + If not transmitted, the default value will be used. 
+ schema: + type: array + items: + type: string + enum: + - workspace + - group + - package + - dashboard + default: [workspace] + example: [group, package, dashboard] + - name: showAllDescendants + in: query + description: | + Show all the descendants to the parent workspace or group. + + * If ```true```, return the list of all child groups/packages/dashboards to the parentId (take into account all other filter parameters). + * If the parentId is not transmitted??? + * If the parent is transmitted, but kind not - and parentId = package??? + schema: + type: boolean + default: false + - name: textFilter + in: query + description: filter by name/alias/label. + schema: + type: string + - name: onlyFavorite + in: query + description: filter only favorite packages + schema: + type: boolean + default: false + - name: onlyShared + in: query + description: filter only shared packages + schema: + type: boolean + default: false + - name: lastReleaseVersionDetails + in: query + description: | + Show/hide the detailed info about the last release version and it's changes, comparing with the previous one. + schema: + type: boolean + default: false + - name: serviceName + description: Service name that package belongs to. Should be equal to service deployment name in kubernetes. + in: query + schema: + type: string + example: "quote-tmf-service" + - $ref: "#/components/parameters/showParents" + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: Whole packages list with paging. 
+ type: object + properties: + packages: + type: array + title: Detailed package + items: + required: + - packageId + - alias + - name + allOf: + - $ref: "#/components/schemas/Package" + - type: object + properties: + parents: + description: List of all parent packages + type: array + items: + $ref: "#/components/schemas/PackageList" + lastReleaseVersionDetails: + type: object + description: | + Details about the last release version and it's changes with previous version. + * Returns only if the lastReleaseVersionDetails:true and the lastReleaseVersion is explicitly filled in on a package. + * Otherwise - will be omitted in the answer. + properties: + version: + description: | + Last release version specified on the package.The @ mask is used to return the revision number. + type: string + example: "2022.4@2" + notLatestRevision: + type: boolean + default: false + summary: + $ref: "#/components/schemas/ChangeSummary" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Packages + summary: Get package by Id + description: Common information about the selected package without files and references. 
+ operationId: getPackagesId + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/showParents" + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Package" + - type: object + properties: + parents: + description: List of all parent packages + type: array + items: + allOf: + - $ref: "#/components/schemas/PackageList" + - type: object + properties: + hasReadPermission: + type: boolean + description: true - if current user has read permission for this package + defaultReleaseVersion: + description: | + Default release version for the package. + Only `release` version may be placed as default. Return the error otherwise. + The @ mask is used to return the revision number. + type: string + example: "2023.1@5" + defaultVersion: + description: | + Default release version for the package. + It is calculable by the algorithm: + * If defaultReleaseVersion is filled in on the package explicitly, return it. + * If not - return the last published version (by name) with "release" status. + * If there were no published release versions, return the last published "draft" version (by date). + * Otherwise - return "". + The @ mask is used to return the revision number.
+ type: string + example: "2023.1@5" + examples: + Package: + $ref: "#/components/examples/Package" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + patch: + tags: + - Packages + - Admin + summary: Change the package's parameters + description: | + Change the package's parameters. + * If the parameter is not transmitted in request - its value stays unchanged. + * The empty parameter value in request sets the empty value in database. + operationId: patchPackagesId + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: Package update parameters + content: + application/json: + schema: + $ref: "#/components/schemas/PackageUpdate" + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Package" + - type: object + properties: + parents: + description: List of all parent packages + type: array + items: + $ref: "#/components/schemas/PackageList" + defaultReleaseVersion: + description: | + Default release version for the package. + Only `release` version may be placed as default. Return the error otherwise. 
+ type: string + example: "2023.1" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "422": + description: Unprocessable Entity + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + delete: + tags: + - Packages + - Admin + summary: Delete package + description: Delete the package and all included published versions.
+ operationId: deletePackagesId + security: + - BearerAuth: [] + - api-key: [] + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/packages/{packageId}/activity: + get: + tags: + - Packages + deprecated: true + x-deprecation-reason: New API version is created - GET /api/v3/packages/{packageId}/activity + summary: Get activity history for the package + description: | + Get activity history for specific package. Return the last N events in descending date order. + operationId: getPackageIdActivity + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: packageId + description: Package unique string identifier (full alias). + in: path + required: true + schema: + type: string + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision.
+ * package_management - create_package, delete_package, patch_package_meta. + in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - name: includeRefs + in: query + description: If true, then events for specified package and all its referenced packages (on any level of hierarchy) shall be returned + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - userId + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + userName: + description: Name of the user who generated an event + type: string + example: "Name Surname" + userId: + description: Login of the user who generated an event + type: string + example: user1221 + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - generate_api_key + - revoke_api_key + - create_package + - delete_package + - grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - 
memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + roles: + type: array + items: + $ref: "#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "22.3@1" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - status + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name.The @ mask is used to return the revision number. 
+ type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v3/packages/{packageId}/activity: + get: + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v4/packages/{packageId}/activity + tags: + - Packages + summary: Get activity history for the package + description: | + Get activity history for specific package. Return the last N events in descending date order. 
+ operationId: getPackageIdActivityV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: packageId + description: Package unique string identifier (full alias). + in: path + required: true + schema: + type: string + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision. + * package_management - create_package, delete_package, patch_package_meta. + in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - name: includeRefs + in: query + description: If true, then events for specified package and all its referenced packages (on any level of hierarchy) shall be returned + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + userName: + $ref: "#/components/schemas/Principal" + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - generate_api_key + - revoke_api_key + - create_package + - delete_package + - 
grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + roles: + type: array + items: + $ref: "#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "22.3@1" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - status + properties: + version: + description: Package version name.The @ mask is used to return the revision number. 
+ type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v4/packages/{packageId}/activity: + get: + tags: + - Packages 
+ summary: Get activity history for the package + description: | + Get activity history for specific package. Return the last N events in descending date order. + operationId: getPackageIdActivityV4 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: packageId + description: Package unique string identifier (full alias). + in: path + required: true + schema: + type: string + - name: types + description: | + Filter for events by group types: + * package_members - grant_role, update_role, delete_role. + * package_security - generate_api_key, revoke_api_key. + * new_version - publish_new_version. + * package_version - patch_version_meta, delete_version, publish_new_revision. + * package_management - create_package, delete_package, patch_package_meta. + * operations_group - create_manual_group, delete_manual_group, + update_operations_group_parameters + in: query + schema: + type: array + items: + type: string + enum: + - package_members + - package_security + - new_version + - package_version + - package_management + - operations_group + - name: includeRefs + in: query + description: If true, then events for specified package and all its referenced packages (on any level of hierarchy) shall be returned + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by userName/packageName + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + events: + type: array + items: + type: object + required: + - date + - userName + - packageId + - packageName + - kind + - params + properties: + date: + description: Date when event was generated + type: string + format: date-time + userName: + $ref: "#/components/schemas/Principal" + packageId: + description: Package unique string identifier (full alias) + type: string + packageName: + description: Package 
name + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + eventType: + description: Activity event type + type: string + enum: + - generate_api_key + - revoke_api_key + - create_package + - delete_package + - grant_role + - delete_role + - update_role + - publish_new_version + - delete_version + - publish_new_revision + - patch_version_meta + - patch_package_meta + - create_manual_group + - delete_manual_group + - update_operations_group_parameters + params: + type: object + description: Events specific params + oneOf: + - type: object + title: ParamsForGrantAndDeleteRole + description: params for grant_role and delete_role events + required: + - memberId + - memberName + - roles + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + roles: + type: array + items: + $ref: "#/components/schemas/Role" + - type: object + title: ParamsForUpdateRole + description: params for update_role event + required: + - memberId + - memberName + properties: + memberId: + description: Login of the member + type: string + example: user1221 + memberName: + description: User which was added/deleted to/from package with some role(s) + type: string + example: Name Surname + - type: object + title: ParamsForPublishAndDeleteVersion + description: params for publish_new_version and delete_version events + required: + - version + - status + properties: + version: + description: Package version name. The @ mask is used to return the revision number. 
+ type: string + example: "22.3@1" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForPublishNewRevision + description: params for publish_new_revision event + required: + - version + - status + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + - type: object + title: ParamsForUpdateVersionMeta + description: params for patch_version_meta event + required: + - version + - versionMeta + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + versionMeta: + description: List of parameters that was updated in version + type: array + items: + type: string + enum: + - status + - label + - type: object + title: ParamsForPatchPackageMeta + description: params for patch_package_meta event + required: + - packageMeta + properties: + packageMeta: + description: List of parameters that was updated in package + type: array + items: + type: string + enum: + - name + - description + - serviceName + - imageUrl + - defaultRole + - type: object + title: ParamsForPostDeleteManualGroups + description: | + params for the following events: + * create_manual_group + * deleted_manual_group + required: + - version + - groupName + - apiType + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + type: boolean + default: false + description: If parameter is not returned, then it is latest revision. 
+ groupName: + description: Manual group name + type: string + apiType: + type: string + enum: + - rest + - graphql + - type: object + title: ParamsForPatchOperationsGroup + description: | + params for the update_operations_group_parameters event + required: + - version + - groupName + - groupsParams + - isPrefixGroup + - apiType + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "22.3@2" + notLatestRevision: + description: If parameter is not returned, then it is latest revision. + type: boolean + default: false + groupName: + description: Manual group name + type: string + groupsParams: + description: List of parameters that was updated in group + type: array + items: + type: string + enum: + - name + - description + - template + - operations + isPrefixGroup: + type: boolean + description: true - if the group created automatically via restGroupingPrefix. + apiType: + type: string + enum: + - rest + - graphql + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/status": + parameters: + - $ref: "#/components/parameters/packageId" + get: + x-nc-api-audience: noBWC + tags: + - Packages + - Admin + 
summary: Get package status + description: Get package status. + operationId: getPackagesIdStatus + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/PackageStatus" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/apiKeys": + parameters: + - $ref: "#/components/parameters/packageId" + post: + deprecated: true + tags: + - Admin + summary: Create a package API Key + description: | + Create a package API Key. + The Api Key for package with kind:group is acceptable for all child groups and packages. + operationId: postPackagesIdApiKeys + security: + - BearerAuth: [] + requestBody: + description: Create API key parameters + content: + application/json: + schema: + type: object + properties: + name: + type: string + description: API key name + roles: + description: List of roles for api key. Limited by user's available roles, i.e. only available roles could be set. List of available roles could be retrieved via "/packages/{packageId}/availableRoles" endpoint. 
+ type: array + items: + type: string + required: + - name + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - type: object + description: ApiKey details for the package + title: PackageApiKey + properties: + id: + description: ApiKey unique identifier + type: string + packageId: + description: Internal unique package ID (full alias) + type: string + name: + description: ApiKey name + type: string + createdBy: + type: string + createdAt: + description: Date and time of ApiKey creation + type: string + format: datetime + roles: + description: List of roles. + type: array + items: + type: string + - type: object + properties: + apiKey: + description: | + Generated ApiKey. It shows only once. Need to copy to your credentials storage. + type: string + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + deprecated: true + tags: + - Admin + summary: Package API Keys list retrieve + 
description: | + Get a package API Keys list. + operationId: getPackagesIdApiKeys + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + apiKeys: + description: List of apikeys for package. + type: array + items: + type: object + description: ApiKey details for the package + title: PackageApiKey + properties: + id: + description: ApiKey unique identifier + type: string + packageId: + description: Internal unique package ID (full alias) + type: string + name: + description: ApiKey name + type: string + createdBy: + type: string + createdAt: + description: Date and time of ApiKey creation + type: string + format: datetime + roles: + description: List of roles. + type: array + items: + type: string + examples: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/apiKeys": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Admin + deprecated: true + x-deprecation-reason: New version of API is created - POST /api/v4/packages/{packageId}/apiKeys. + summary: Create a package API Key + description: | + Create a package API Key. + The Api Key for package with kind:group is acceptable for all child groups and packages.\ + If packageId = '\*', then system tokens shall be created. Only system administrator can specify packageId = '\*'. 
+ operationId: postPackagesIdApiKeysV3 + security: + - BearerAuth: [] + requestBody: + description: Create API key parameters + content: + application/json: + schema: + type: object + properties: + name: + type: string + description: API key name + roles: + description: List of roles for api key. Limited by user's available roles, i.e. only available roles could be set. List of available roles could be retrieved via "/packages/{packageId}/availableRoles" endpoint. + type: array + items: + type: string + required: + - name + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - type: object + description: ApiKey details for the package + title: PackageApiKey + properties: + id: + description: ApiKey unique identifier + type: string + packageId: + description: Internal unique package ID (full alias) + type: string + name: + description: ApiKey name + type: string + createdBy: + $ref: "#/components/schemas/User" + createdAt: + description: Date and time of ApiKey creation + type: string + format: datetime + roles: + description: List of roles. + type: array + items: + type: string + - type: object + properties: + apiKey: + description: | + Generated ApiKey. It shows only once. Need to copy to your credentials storage. 
+ type: string + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + tags: + - Admin + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v4/packages/{packageId}/apiKeys. + summary: Package API Keys list retrieve + description: | + Get a package API Keys list.\ + If packageId = '\*', then system tokens shall be returned. Only system administrator can specify packageId = '\*'. + operationId: getPackagesIdApiKeysV3 + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + apiKeys: + description: List of apikeys for package. + type: array + items: + type: object + description: ApiKey details for the package + title: PackageApiKey + properties: + id: + description: ApiKey unique identifier + type: string + packageId: + description: Internal unique package ID (full alias) + type: string + name: + description: ApiKey name + type: string + createdBy: + $ref: "#/components/schemas/User" + createdAt: + description: Date and time of ApiKey creation + type: string + format: datetime + roles: + description: List of roles. 
+ type: array + items: + type: string + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v4/packages/{packageId}/apiKeys": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Admin + summary: Create a package API Key + description: | + Create a package API Key. + The Api Key for package with kind:group is acceptable for all child groups and packages.\ + If packageId = '\*', then system tokens shall be created. Only system administrator can specify packageId = '\*'. + operationId: postPackagesIdApiKeysV4 + security: + - BearerAuth: [] + requestBody: + description: Create API key parameters + content: + application/json: + schema: + type: object + properties: + name: + type: string + description: API key name. Name must be unique. + roles: + description: List of roles for api key. Limited by user's available roles, i.e. only available roles could be set. List of available roles could be retrieved via "/packages/{packageId}/availableRoles" endpoint. + type: array + items: + type: string + createdFor: + type: string + description: id of the user for whom the API key shall be created. 
+ example: user1221 + required: + - name + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PackageApiKey" + - type: object + properties: + apiKey: + description: | + Generated ApiKey. It shows only once. Need to copy to your credentials storage. + type: string + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + tags: + - Admin + summary: Package API Keys list retrieve + description: | + Get a package API Keys list.\ + If packageId = '\*', then system tokens shall be returned. Only system administrator can specify packageId = '\*'. + operationId: getPackagesIdApiKeysV4 + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + apiKeys: + description: List of apikeys for package. 
+ type: array + items: + $ref: "#/components/schemas/PackageApiKey" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/apiKeys/{id}": + parameters: + - $ref: "#/components/parameters/packageId" + - name: id + description: Package API key Id + in: path + required: true + schema: + type: string + delete: + tags: + - Admin + summary: Delete package API Key + description: | + Delete package API Key.\ + If packageId = '\*', then system token with specified id shall be deleted. Only system administrator can specify packageId = '\*'. 
+ operationId: deletePackagesIdApiKeysId + security: + - BearerAuth: [] + - api-key: [] + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/favor": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Packages + - Users + summary: Favor package + description: Add the package to favorite list for the user. The user is taken from the token info. 
+ operationId: postPackagesIdFavor + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/disfavor": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Packages + - Users + summary: Disfavor package + description: Remove the package from favorite list for the user. The user is taken from the token info. 
+ operationId: postPackagesIdDisfavor + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/publish": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Publish + - Admin + summary: Publish package version via upload + description: | + Publish package version via upload. Possible options: + * **Client-side building** - client application marks, that the validation and build of the final specification will be outside the APIHUB backend. + The final specifications will be stored using the POST /packages/{packageId}/publish/{publishId}/status API. + The 202 response and the publish process Id will be returned in success. + In case of the client session close, the build will be continued on the server-sde. + * **Server-side building** - client application send all raw files-sources and starts the validation and build of the final specification in APIHUB backend. + The 202 response and the publish process Id will be returned in success. + * **No building** - only references publication. Files array in config and sources are empty. In this case the build process won't be started. 
+ The 204 response will be returned in success. + operationId: postPackagesIdPublish + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: clientBuild + in: query + description: Client-side package build will be used. Should be used only for browser-based build process. + required: false + schema: + type: boolean + default: false + - name: resolveRefs + in: query + required: false + description: With resolveRefs=true all references will be resolved into a flat list. With resolveRefs=false it's expected that all references are already resolved. + schema: + type: boolean + default: true + - name: resolveConflicts + in: query + required: false + description: "In case when resolved refs list contains multiple versions of the same package: + + - if resolveConflicts=false - status 400 (Bad request). Conflicts should be resolved manually. + + - if resolveConflicts=true - conflicts will be resolved automatically, some refs will be marked as excluded" + schema: + type: boolean + default: true + requestBody: + description: Publish params + content: + multipart/form-data: + schema: + type: object + required: + - config + properties: + sources: + type: string + description: | + Files for publish in one zip archive. + **Required**, if the files array is filled in the config. + format: binary + config: + $ref: "#/components/schemas/BuildConfig" + builderId: + type: string + description: Builder identifier. **Required** only if clientBuild=true. Used to bind the build to specific executor. 
+ responses: + "202": + description: Publish process started + content: + application/json: + schema: + oneOf: + - type: object + title: serverBuild + properties: + publishId: + type: string + description: Publish process Id + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + - type: object + title: clientBuild + description: Returns final build config when clientBuild=true + properties: + publishId: + type: string + description: Publish process Id + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + config: + $ref: "#/components/schemas/BuildConfig" + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden. Editor or admin rights for the package are required to publish version. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/publish/{publishId}/status": + parameters: + - $ref: "#/components/parameters/packageId" + - name: publishId + description: Publish Id + in: path + required: true + schema: + type: string + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + post: + deprecated: true + x-deprecation-reason: New version of API is created - POST /api/v3/packages/{packageId}/publish/{publishId}/status + x-nc-api-audience: noBWC + tags: + - Publish + - Admin + summary: Store publish status + description: Store publish status and result. + operationId: postPackagesIdPublishIdStatus + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: | + Store build result, depending on the process status. + * **running** - build in progress, data object is not transmitted. + * **error** - build process will send the list of errors. + * **complete** - build process will send a result in a ZIP archive. + content: + multipart/form-data: + schema: + type: object + required: + - status + - builderId + properties: + data: + oneOf: + - $ref: "#/components/schemas/BuildResultV2" + - $ref: "#/components/schemas/BuildErrors" + status: + description: Build process result status. + type: string + enum: + - running + - error + - complete + builderId: + description: Builder identifier.
+ type: string + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + tags: + - Publish + - Admin + summary: Get publish process status + description: Get publish process status. + operationId: getPackagesIdPublishIdStatus + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + status: + description: Publish process status. + type: string + enum: + - running + - error + - complete + - none + message: + description: The message for **error** status. 
+ type: string + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/publish/{publishId}/status": + parameters: + - $ref: "#/components/parameters/packageId" + - name: publishId + description: Publish Id + in: path + required: true + schema: + type: string + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + post: + x-nc-api-audience: noBWC + tags: + - Publish + - Admin + summary: Store publish status + description: Store publish status and result. + operationId: postPackagesIdPublishIdStatusV3 + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: | + Store build result, depending on the process status. + * **running** - build in progress, data object is not transmitted. + * **error** - build process will send the list of errors. + * **complete** - build process will send a result in a ZIP archive. + content: + multipart/form-data: + schema: + type: object + required: + - status + - builderId + properties: + data: + oneOf: + - $ref: "#/components/schemas/BuildResult" + - $ref: "#/components/schemas/BuildErrors" + status: + description: Build process result status. + type: string + enum: + - running + - error + - complete + builderId: + description: Builder identifier.
+ type: string + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Versions + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions + summary: Get package versions list + description: Get the published package's versions list. + operationId: getPackagesIdVersions + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - name: textFilter + in: query + description: Filter by version name|labels.
+ schema: + type: string + - name: status + in: query + description: Filter versions by status (start with match) + required: false + schema: + type: string + enum: + - draft + - release + - archived + - name: checkRevisions + in: query + description: | + Flag, if to search in the previous versions revisions. + * if several revisions were found, return only the maximum found value. + * if false - return the last published revision. + schema: + type: boolean + default: false + - name: versionLabel + in: query + description: | + Filter the package versions by label, exact match. + * if the checkRevisions: false - search in the last published revision. + * if the checkRevisions: true - search in all revisions (backward order). + schema: + type: string + example: "app.kubernetes.io/version:release-2022.4-20230228.094427-171" + - name: sortBy + in: query + description: Sort versions by version name or creation date + schema: + type: string + enum: + - version + - createdAt + default: version + - name: sortOrder + in: query + description: Sorting order + schema: + type: string + enum: + - asc + - desc + default: desc + responses: + "200": + description: Success + content: + application/json: + schema: + description: Whole package versions list with paging. + type: object + properties: + versions: + type: array + items: + description: Base parameters of published version (without content) + type: object + title: PackageVersion + required: + - version + - status + - createdAt + - createdBy + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "2022.3@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + createdBy: + type: string + description: User, who created a package. + format: datetime + createdAt: + type: string + description: Date of package creation. + format: datetime + versionLabels: + description: List of version labels. 
+ type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + previousVersion: + description: previous package version name.The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Package id of the previous version. Can be empty if the value is equal to the package id. + type: string + example: "QS.GRP.SOMEPKG" + notLatestRevision: + type: boolean + default: false + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Versions + summary: Get package versions list + description: Get the published package's versions list. 
+ operationId: getPackagesIdVersionsV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - name: textFilter + in: query + description: Filter by version name|labels. + schema: + type: string + - name: status + in: query + description: Filter versions by status (start with match) + required: false + schema: + type: string + enum: + - draft + - release + - archived + - name: checkRevisions + in: query + description: | + Flag, if to search in the previous versions revisions. + * if several revisions were found, return only the maximum found value. + * if false - return the last published revision. + schema: + type: boolean + default: false + - name: versionLabel + in: query + description: | + Filter the package versions by label, exact match. + * if the checkRevisions: false - search in the last published revision. + * if the checkRevisions: true - search in all revisions (backward order). + schema: + type: string + example: "app.kubernetes.io/version:release-2022.4-20230228.094427-171" + - name: sortBy + in: query + description: Sort versions by version name or creation date + schema: + type: string + enum: + - version + - createdAt + default: version + - name: sortOrder + in: query + description: Sorting order + schema: + type: string + enum: + - asc + - desc + default: desc + responses: + "200": + description: Success + content: + application/json: + schema: + description: Whole package versions list with paging. 
+ type: object + properties: + versions: + type: array + items: + $ref: "#/components/schemas/PackageVersion" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Versions + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions/{version} + summary: Get package version content + description: Get the published package's version content. Returns all content objects and folders. + operationId: getPackagesIdVersionsId + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: version + in: path + description: | + Package version. + The mask @ may be used for search in a specific revision. + required: true + schema: + type: string + example: 22.3@3 + - name: includeSummary + in: query + description: Show/hide the summary info about changes and operations. 
+ schema: + type: boolean + default: false + - name: includeOperations + in: query + description: Show/hide the version's operations list. + schema: + type: boolean + default: false + - name: includeGroups + in: query + description: Flag to define whether to return list of groups of current version or not. + schema: + type: boolean + default: false + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PackageVersionContentV2" + - type: object + properties: + operationTypes: + type: array + items: + type: object + properties: + apiType: + type: string + enum: + - rest + - graphql + changesSummary: + $ref: '#/components/schemas/ChangeSummary' + operationsCount: + type: number + deprecatedCount: + type: number + operations: + type: object + description: Hash map of 'operationId:dataHash value'. + additionalProperties: + description: Operation hash. + type: string + example: lkfsdkff112 + - type: object + properties: + operationGroups: + type: array + description: | + List of groups. + items: + allOf: + - description: Version group. + type: object + required: + - groupName + - apiType + - isPrefixGroup + properties: + groupName: + type: string + description: Unique group name. + example: New_operation_group + apiType: + type: string + description: Type of the API. + example: rest + description: + type: string + description: Description of group. + isPrefixGroup: + type: boolean + description: true - if the group created automatically via restGroupingPrefix. + example: false + exportTemplateFileName: + type: string + description: The name of the export template file, if there is one. 
+ example: template123.json + - type: object + required: + - operationsCount + properties: + operationsCount: + type: number + description: Number of operations in operation group + ghostOperationsCount: + type: number + description: Number of ghost operations in operation group + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + patch: + tags: + - Versions + - Admin + summary: Update package version + description: | + Update package version. + * If the parameter is not transmitted in request - its value stays unchanged. + * The empty parameter value in request sets the empty value in database. + * The array of labels will be fully replaced as-it-send, no JSON-Patch approach for arrays is applicable. 
+ operationId: patchPackagesIdVersionsIdV2 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/version" + requestBody: + description: Version update params + content: + application/json: + schema: + type: object + properties: + status: + $ref: "#/components/schemas/VersionStatusEnum" + versionLabels: + description: List of version labels. + type: array + items: + type: string + example: ["app.kubernetes.io/part-of:CloudQSS-CPQBE", "app.kubernetes.io/version:release-candidate-20230410.152115-2782"] + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/PackageVersionContent" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "422": + description: Unprocessable Entity + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + delete: + 
tags: + - Versions + - Admin + summary: Delete package version + description: | + Delete the package's version. + If the version was placed as a "defaultReleaseVersion" on a package, it will be cleared on this package (without the previous version restore). + operationId: deletePackagesIdVersionsId + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/version" + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Versions + summary: Get package version content + description: Get the published package's version content. Returns all content objects and folders. + operationId: getPackagesIdVersionsIdV4 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: version + in: path + description: | + Package version. + The mask @ may be used for search in a specific revision. 
+ required: true + schema: + type: string + example: 22.3@3 + - name: includeSummary + in: query + description: Show/hide the summary info about changes and operations. + schema: + type: boolean + default: false + - name: includeOperations + in: query + description: Show/hide the version's operations list. + schema: + type: boolean + default: false + - name: includeGroups + in: query + description: Flag to define whether to return list of groups of current version or not. + schema: + type: boolean + default: false + - name: sortBy + in: query + description: Sort versions by version name or creation date + schema: + type: string + enum: + - version + - createdAt + default: version + - name: sortOrder + in: query + description: Sorting order + schema: + type: string + enum: + - asc + - desc + default: desc + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PackageVersionContent" + - type: object + properties: + operationTypes: + type: array + items: + type: object + properties: + apiType: + $ref: "#/components/schemas/ApiType" + changesSummary: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of declarative changes in the version. + numberOfImpactedOperations: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of operations impacted by each severety type. + operationsCount: + type: number + deprecatedCount: + type: number + noBwcOperationsCount: + type: number + description: Number of no-BWC operations. 
+ internalAudienceOperationsCount: + type: number + description: Number of operations with apiAudience = internal + unknownAudienceOperationsCount: + type: number + description: Number of operations with apiAudience = unknown + apiAudienceTransitions: + type: array + description: Shows transitions of operations' apiAudience value (compared to the previous release version) and number of operations in which this transition occurred. The array contains only records of transitions that actually occurred in operations. + items: + type: object + properties: + currentAudience: + type: string + description: Current apiAudience value (currentAudience must not be equal to previousAudience) + enum: + - internal + - external + - unknown + previousAudience: + type: string + description: Previous apiAudience value + enum: + - internal + - external + - unknown + operationsCount: + type: number + description: Number of operations in which the apiAudience was changed from previousAudience to currentAudience + operations: + type: object + description: Hash map of 'operationId:dataHash value'. + additionalProperties: + description: Operation hash. + type: string + example: lkfsdkff112 + - type: object + properties: + operationGroups: + type: array + description: | + List of groups. 
+ items: + allOf: + - $ref: "#/components/schemas/CreateOperationGroup" + - type: object + required: + - operationsCount + properties: + operationsCount: + type: number + description: Number of operations in operation group + ghostOperationsCount: + type: number + description: Number of ghost operations in operation group + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/changes/summary": + get: + tags: + - Changes + summary: Get changes summary + description: | + Get summary of changes between two packages versions. 
+ operationId: getPackageIdVersionChangesSummary + security: + - BearerAuth: [] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: previousVersion + in: query + description: | + Package previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "2022.3" + - name: previousVersionPackageId + in: query + description: | + Package unique identifier for previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF" + responses: + "200": + description: Success + content: + application/json: + schema: + oneOf: + - type: object + title: packageComparison + description: List of changes data. + required: + - operationTypes + properties: + operationTypes: + type: array + items: + type: object + properties: + apiType: + $ref: "#/components/schemas/ApiType" + numberOfImpactedOperations: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of operations impacted by each severity type. + changesSummary: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of declarative changes of each severity type.
+ tags: + type: array + items: + type: string + example: ["tag1", "tag2"] + noContent: + type: boolean + description: true - operation comparison cache is not stored in database + default: false + - type: object + title: dashboardComparison + required: + - refs + - packages + properties: + refs: + description: | + Refs shows which packages in compared dashboards were added/deleted/changed and changes summary for each package: + * added package - packageRef is returned + * deleted package - previousPackageRef is returned returned + * changed package - both packageRef and previousPackageRef are returned + type: array + items: + type: object + properties: + packageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + previousPackageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + operationTypes: + type: array + items: + type: object + properties: + apiType: + $ref: "#/components/schemas/ApiType" + numberOfImpactedOperations: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of operations impacted by each severety type. + changesSummary: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of declarative changes of each severety type. + tags: + type: array + items: + type: string + example: ["tag1", "tag2"] + noContent: + type: boolean + description: true - operation comparison cache is not stored in database + default: false + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. 
+ type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/changes": + get: + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions/{version}/{apiType}/changes + tags: + - Changes + - Versions + summary: Get list of changed operations + description: | + Get changes between two compared package versions with details by operations.\ + The result list depends on the API type. 
+ operationId: getPackagesIdVersionsIdApiTypeChanges + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/severity" + - name: previousVersion + in: query + description: | + Package previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "2022.3" + - name: previousVersionPackageId + in: query + description: | + Package unique identifier for previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF" + - name: version + in: path + description: | + Package version. + The mask @ may be used for search in a specific revision. + required: true + schema: + type: string + example: "2022.3@3" + - name: refPackageId + description: Filter by package id of ref package and previous ref package. + in: query + schema: + type: string + - name: apiKind + description: Filter by api kind + in: query + schema: + type: string + enum: + - bwc + - no-bwc + - experimental + - name: documentSlug + in: query + description: Document unique string identifier + schema: + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + - name: tag + in: query + schema: + type: string + description: | + A full match is required.\ + Multiple tags separated by comma can be specified. + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. 
+ schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by operation's title/path/method. + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + required: + - operations + properties: + previousVersion: + description: Name of the previous published version. The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + operations: + type: array + items: + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperationMeta" + - $ref: "#/components/schemas/GraphQLOperationMeta" + - type: object + required: + - operationId + - changeSummary + properties: + packageRef: + description: > + Parent package and version link. Created by the concatenation of the packageId and version name with At sign. + type: string + example: QS.CloudQSS.CPQ.Q-TMF@2023.2 + previousVersionPackageRef: + description: | + Parent package and version link. 
+ Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + operationId: + description: >- + Operation unique identifier (slug). Not the same as + operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + apiKind: + type: string + enum: + - bwc + - no-bwc + - experimental + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + previousDataHash: + description: Hash of the previous published version operation. + type: string + example: sdfsdfsf24dds2 + changeSummary: + $ref: "#/components/schemas/ChangeSummary" + packages: + description: > + A mapped list of the packageId and version name + concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: 
+ schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/changes": + get: + tags: + - Changes + - Versions + summary: Get list of changed operations + description: | + Get changes between two compared package versions with details by operations.\ + The result list depends on the API type. + operationId: getPackagesIdVersionsIdApiTypeChangesV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/apiAudience" + - $ref: "#/components/parameters/severity" + - name: previousVersion + in: query + description: | + Package previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "2022.3" + - name: previousVersionPackageId + in: query + description: | + Package unique identifier for previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used + schema: + type: string + example: "QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF" + - name: version + in: path + description: | + Package version. + The mask @ may be used for search in a specific revision. + required: true + schema: + type: string + example: "2022.3@3" + - name: refPackageId + description: Filter by package id of ref package and previous ref package. 
+ in: query + schema: + type: string + - name: apiKind + description: Filter by api kind + in: query + schema: + type: string + enum: + - bwc + - no-bwc + - experimental + - name: documentSlug + in: query + description: Document unique string identifier + schema: + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + - name: tag + in: query + schema: + type: string + description: | + A full match is required.\ + Multiple tags separated by comma can be specified. + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by operation's title/path/method. 
+ schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + required: + - operations + properties: + previousVersion: + description: Name of the previous published version. The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + operations: + type: array + items: + allOf: + - oneOf: + - title: RestOperation + type: object + required: + - path + - method + properties: + path: + description: Operation endpoint path. + type: string + example: "/quoteManagement/v5/quote" + method: + description: Operation method. + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + - title: GraphQLOperation + type: object + required: + - type + - method + properties: + type: + description: Operation type + type: string + enum: + - query + - mutation + - subscription + method: + description: GraphQL operation method. + type: string + example: getPaymentMethodSpecificationCore + - type: object + required: + - operationId + - changeSummary + properties: + operationId: + description: >- + Operation unique identifier (slug). Not the same as + operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + previousOperation: + $ref: "#/components/schemas/OperationInfoFromDifferentVersions" + currentOperation: + $ref: "#/components/schemas/OperationInfoFromDifferentVersions" + changeSummary: + allOf: + - $ref: "#/components/schemas/ChangeSummary" + - type: object + description: Number of declarative changes in one specific operation. + packages: + description: > + A mapped list of the packageId and version name + concatenation with At sign to the package objects. 
+ type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v3/packages/{packageId}/versions/{version}/{apiType}/export/changes: + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/apiAudience" + - $ref: "#/components/parameters/severity" + - name: previousVersion + in: query + description: | + Package previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then comparison with previous **release** version shall be returned. 
+ schema: + type: string + example: "2022.3" + - name: previousVersionPackageId + in: query + description: | + Package unique identifier for previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then comparison with previous **release** version shall be returned. + schema: + type: string + example: "QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF" + - name: refPackageId + description: Filter by package id of ref package and previous ref package. + in: query + schema: + type: string + - name: apiKind + description: Filter by api kind + in: query + schema: + type: string + enum: + - bwc + - no-bwc + - experimental + - name: tag + in: query + schema: + type: string + description: | + A full match is required.\ + Multiple tags separated by comma can be specified. + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. 
+ schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by operation's title/path/method. + schema: + type: string + get: + tags: + - Changes + summary: Export API changes to xlsx file + description: Export API changes to xlsx file + operationId: getPackageIdVersionIdChangesExportV3 + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/xlsx: + schema: + type: string + format: binary + description: xlsx file to download + headers: + Content-Disposition: + schema: + type: string + description: xlsx file name + example: attachment; filename="APIChanges_package.id_version.xlsx" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/packages/{packageId}/versions/{version}/changes/export: + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: previousVersion + in: query + description: | + 
Package previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then comparison with previous **release** version shall be returned. + schema: + type: string + example: "2022.3" + - name: previousVersionPackageId + in: query + description: | + Package unique identifier for previous version.\ + If both previousVersion and previousVersionPackageId are not specified, then comparison with previous **release** version shall be returned. + schema: + type: string + example: "QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF" + - name: format + in: query + description: File format for export + schema: + type: string + enum: + - xlsx + default: xlsx + get: + tags: + - Changes + summary: Export API changes to xlsx file + description: Export API changes to xlsx file + operationId: getPackageIdVersionIdChangesExport + security: + - BearerAuth: [ ] + - api-key: [ ] + responses: + "200": + description: Success + content: + application/xlsx: + schema: + type: string + format: binary + description: xlsx file to download + headers: + Content-Disposition: + schema: + type: string + description: xlsx file name + example: attachment; filename="APIChanges_package.id_version.xlsx" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: 
"#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/sources": + parameters: + - $ref: "#/components/parameters/packageId" + - name: version + in: path + description: Package version. The mask @ may be used to get resources of the specific revision. If the @revision is not provided, the latest version's revision will be used. + required: true + schema: + type: string + example: "2022.3" + get: + tags: + - Export + - Versions + summary: Export sources of package version + description: | + Export sources of package version as a zip archive. + operationId: getPackagesIdVersionsIdSources + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/octet-stream: + schema: + type: string + format: binary + description: ZIP file with package version sources to download + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/config": + parameters: + - $ref: "#/components/parameters/packageId" + - name: version + in: path + description: 
Package version. The mask @ may be used to get resouces of the specific revision. If the @revision is not provided, the latest version's revision will be used. + required: true + schema: + type: string + example: "2022.3" + get: + tags: + - Versions + summary: Get package version config + description: | + Get content of package version config + operationId: getPackagesIdVersionsIdConfig + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/BuildConfig" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/copy": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + post: + tags: + - Publish + - Admin + summary: Copy package version to another package + description: | + Publish package version in target package using sources of source package (server-side building only).\ + The final specifications will be stored using the POST /packages/{packageId}/publish/{publishId}/status. 202 response and the publish process Id will be returned in success. 
+ operationId: getPackageIdVersionIdCopy + security: + - BearerAuth: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + required: + - targetPackageId + - targetVersion + - targetStatus + properties: + targetPackageId: + description: Target package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + targetVersion: + description: Version name for publication in target package. + type: string + example: "2022.3" + targetPreviousVersion: + description: Name of the previous published version in target package. + type: string + example: "2022.2" + targetPreviousVersionPackageId: + description: Package id of the previous version. The parameter may be empty if the value is equal to the targetPackageId. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + targetStatus: + $ref: "#/components/schemas/VersionStatusEnum" + targetVersionLabels: + description: List of version labels in target package. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + responses: + "202": + description: Publish process started + content: + application/json: + schema: + type: object + title: serverBuild + properties: + publishId: + type: string + description: Publish process Id + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden. 
No permission to publish version in current status in target package. + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/compare": + post: + tags: + - Changes + summary: Version changelog async calculation + description: | + Calculate changes between any two packages/versions/revisions for any type of API. + + * **200 Comparison calculated successfully** if there is such comparison. + + * **202 Accepted** will be returned after starting of the async changelog calculation. + operationId: postCompare + security: + - BearerAuth: [] + parameters: + - name: clientBuild + in: query + description: Client-side package build will be used. + required: false + schema: + type: boolean + default: false + - name: builderId + in: query + required: false + description: Builder identifier. **Required** if clientBuild=true. + schema: + type: string + - name: reCalculate + in: query + description: | + Flag for the force changelog re-calculation. + May be used after the previous API call with **error** status. + schema: + type: boolean + default: false + requestBody: + description: Changelog calculation parameters. + content: + multipart/form-data: + schema: + type: object + required: + - packageId + - version + - previousVersionPackageId + - previousVersion + properties: + packageId: + description: Package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + version: + description: | + Package version. + The mask @ may be used for search in a specific revision. + + If the @revision is not provided, the latest version's revision will be used. 
+ type: string + example: "2022.3@3" + previousVersionPackageId: + description: Package id of the previous version to compare with. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + previousVersion: + description: | + Name of the previous published version to compare with. + The mask @ may be used for search in a specific revision. + + If the @revision is not provided, the latest version's revision will be used. + type: string + example: "2022.2@4" + responses: + "200": + description: Comparison calculated successfully + "201": + description: Created + content: + application/json: + schema: + type: object + description: build config + properties: + packageId: + description: Package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + version: + description: | + Package version. + The mask @ will be used for return in a specific revision. + + If the @ was not transmitted in the request - it won't be returned in response. Consider the version as the latest one. + type: string + example: "2022.3@3" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + previousVersion: + description: | + Name of the previous published version. + The mask @ will be used for return a specific revision. + + If the @ was not transmitted in the request - it won't be returned in response. Consider the version as the latest one. + type: string + example: "2022.2@4" + buildType: + description: | + Type of the build process. Available options are: + **changelog** - Only the changelog calculation, no API contracts version will be created. + type: string + enum: + - changelog + createdBy: + description: User, created the changelog build. + type: string + buildId: + description: Id of the created build. + type: string + "202": + description: Accepted + content: + application/json: + schema: + type: object + properties: + status: + description: Calculation process status. 
+ type: string + enum: + - running + - error + message: + description: The message for **error** status. + type: string + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/transform": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + post: + deprecated: true + x-deprecation-reason: New version of API is created - POST /api/v3/packages/{packageId}/versions/{version}/{apiType}/build/groups/{groupName}/buildType/{buildType} + tags: + - Versions + - Operation groups + summary: Async document transformation + description: | + Async task for document transformation. Document transformation is required for exporting operations group. + * **200 Documents transformation completed successfully** if documents were already transformed. + + * **202 Accepted** will be returned after starting process for documents transformation. + operationId: postGenerateDocument + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: clientBuild + in: query + description: Client-side package build will be used. + required: false + schema: + type: boolean + default: false + - name: builderId + in: query + required: false + description: Builder identifier. **Required** if clientBuild=true. 
+ schema: + type: string + - name: reCalculate + in: query + description: | + Flag for the force document re-calculation. + May be used after the previous API call with **error** status. + schema: + type: boolean + default: false + responses: + "200": + description: Documents transformation completed successfully + "201": + description: Created + content: + application/json: + schema: + type: object + description: build config + properties: + packageId: + description: Package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + version: + description: | + Package version. + The mask @ will be used for return in a specific revision. + + If the @ was not transmitted in the request - it won't be returned in response. Consider the version as the latest one. + type: string + example: "2022.3@3" + apiType: + description: Type of the API + type: string + enum: + - rest + - graphql + groupName: + description: Name of the group + type: string + example: v1 + buildType: + description: | + Type of the build process. Available options are: + **documentGroup** - Only the document generation, no API contracts version will be created. + type: string + enum: + - documentGroup + createdBy: + description: User, created the transform build. + type: string + buildId: + description: Id of the created build. + type: string + "202": + description: Accepted + content: + application/json: + schema: + type: object + properties: + status: + description: Calculation process status. + type: string + enum: + - running + - error + message: + description: The message for **error** status. 
+ type: string + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/build/groups/{groupName}/buildType/{buildType}": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: apiType + description: Type of the API. + in: path + required: true + schema: + type: string + enum: + - rest + - name: buildType + description: | + Type of the build process for OpeanAPI specification tranformation. Available options are: + - **reducedSourceSpecifications** - proccess that finds source specifications for all operations from operation group and removes from these specifications operations other than those that are included into operation group. + - **mergedSpecification** - process the merges all operations from an operation group into one specification. 
+ in: path + required: true + schema: + type: string + enum: + - reducedSourceSpecifications + - mergedSpecification + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + post: + tags: + - Versions + - Operation groups + summary: Async document transformation + description: | + Async task for document transformation. Document transformation is required for exporting operations group. + * **200 Documents transformation completed successfully** if documents were already transformed. + + * **202 Accepted** will be returned after starting process for documents transformation. + operationId: postGenerateDocumentV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: clientBuild + in: query + description: Client-side package build will be used. + required: false + schema: + type: boolean + default: false + - name: builderId + in: query + required: false + description: Builder identifier. **Required** if clientBuild=true. + schema: + type: string + - name: reCalculate + in: query + description: | + Flag for the force document re-calculation. + May be used after the previous API call with **error** status. + schema: + type: boolean + default: false + - name: format + in: query + description: | + Format of the exported file.\ + If buildType = reducedSourceSpecifications, then format can be yaml, json or html.\ + If buildType = mergedSpecification, then format can be yaml or json; html is not supported for this buildType. + schema: + type: string + enum: + - yaml + - json + - html + default: json + responses: + "200": + description: Documents transformation completed successfully + "201": + description: Created + content: + application/json: + schema: + type: object + description: build config + properties: + packageId: + description: Package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + version: + description: | + Package version. 
+ The mask @ will be used for return in a specific revision. + If the @ was not transmitted in the request - it won't be returned in response. Consider the version as the latest one. + type: string + example: "2022.3@3" + apiType: + description: Document transformation is available only for apiType = REST + type: string + enum: + - rest + groupName: + description: Name of the group + type: string + example: v1 + buildType: + description: | + Type of the build process for OpenAPI specification transformation. Available options are: + - **reducedSourceSpecifications** - process that finds source specifications for all operations from operation group and removes from these specifications operations other than those that are included into operation group. + - **mergedSpecification** - process that merges all operations from an operation group into one specification. + type: string + enum: + - reducedSourceSpecifications + - mergedSpecification + format: + type: string + enum: + - yaml + - json + - html + createdBy: + description: The user who created the object + type: string + buildId: + description: Id of the created build. + type: string + "202": + description: Accepted + content: + application/json: + schema: + type: object + properties: + status: + description: Calculation process status. + type: string + enum: + - running + - error + message: + description: The message for **error** status. 
+ type: string + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/export/groups/{groupName}": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + get: + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions/{version}/{apiType}/export/groups/{groupName}/buildType/{buildType} + tags: + - Export + - Operation groups + summary: Export operations group as OpenaAPI documents + description: Export operation group as OpenaAPI documents + operationId: getPackagesIdVersionsIdExportGroupName + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: format + in: query + schema: + type: string + description: | + format of documents: + - json + - html + enum: + - json + - html + default: html + responses: + "200": + description: Success + content: + application/zip: + schema: + type: string + format: binary + description: zip file to download + headers: + Content-Disposition: + schema: + type: 
string + description: zip file name + example: attachment; filename="groupName_package.id_version.zip" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/export/groups/{groupName}/buildType/{buildType}": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: apiType + description: Type of the API. + in: path + required: true + schema: + type: string + enum: + - rest + - name: buildType + description: | + Type of the build process for OpeanAPI specification tranformation. Available options are: + - **reducedSourceSpecifications** - proccess that finds source specifications for all operations from operation group and removes from these specifications operations other than those that are included into operation group. + - **mergedSpecification** - process the merges all operations from an operation group into one specification. 
+ in: path + required: true + schema: + type: string + enum: + - reducedSourceSpecifications + - mergedSpecification + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + get: + tags: + - Export + - Operation groups + summary: Export operations group as OpenAPI documents + description: | + Export all operations from an operations group (with apiType = REST API) as OpenAPI specification(s). + operationId: getPackagesIdVersionsIdExportGroupNameV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: format + in: query + required: true + description: | + Format of the exported file.\ + If buidType = reducedSourceSpecifications, then format can be yaml, json or html.\ + If buidType = mergedspecification, then format can be yaml or json; html is not supported for this buildType. + schema: + type: string + enum: + - yaml + - json + - html + default: json + responses: + "200": + description: Success + content: + application/zip: + schema: + type: string + format: binary + description: ZIP file to download. ZIP will be returned if buildType = reducedSourceSpecifications + application/json: + schema: + type: string + format: binary + description: JSON file to download. JSON will be returned if buidType = mergedspecification and format = json + application/yaml: + schema: + type: string + format: binary + description: YAML file to download. 
YAML will be returned if buildType = mergedSpecification and format = yaml + headers: + Content-Disposition: + schema: + type: string + description: File name + example: attachment; filename="__.zip" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/files/{slug}/raw": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + get: + tags: + - Versions + summary: Get file data (published) + description: Get the published content object in a RAW format + operationId: getPackagesIdVersionsIdFilesSlugRaw + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + plain/text: + schema: + description: TXT file content (JSON, YAML, MD, TXT). + type: string + application/octet-stream: + schema: + description: Binary content for unsupported file types (doc, xls, jpg, png, etc.). 
+ type: string + format: binary + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/files/{slug}/doc": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + get: + tags: + - Export + summary: Export offline API documentation by selected file + description: | + Export of offline API documentation by selected file as a zip archive. + Type of the documentation file is provided as input parameters: + - interactive - html document + - raw - yaml/json document. + operationId: getPackagesIdVersionsIdFilesSlugDoc + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: docType + in: query + description: Type of the exported documentation. 
+ required: false + schema: + type: string + enum: + - interactive + - raw + default: interactive + responses: + "200": + description: Success + content: + application/octet-stream: + schema: + type: string + format: binary + description: Documentation ZIP file (if docType = interactive) or yaml/json (if docType = raw) to download + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/sourceData": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + get: + tags: + - Export + - Versions + summary: Export sources of package version with build config + description: | + Export sources of package version as a zip archive and build configuration that was used for this version. 
+ operationId: getPackageVersionSourcesWithBuildConfig + security: + - BearerAuth: [ ] + - api-key: [ ] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + description: Build configuration and ZIP file with package version sources to download + properties: + sources: + type: string + description: ZIP file with package version sources to download + config: + $ref: "#/components/schemas/BuildConfig" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + FileNotFound: + $ref: "#/components/examples/FileNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/doc": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + get: + tags: + - Export + summary: Export offline API documentation by selected versions + description: | + Export of offline API documentation by selected version (all files) as a zip archive. + Type of the documentation file is provided as input parameters: + * interactive - html document. 
+ operationId: getPackagesIdVersionsIdDoc + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: docType + in: query + description: Type of the exported documentation. + required: false + schema: + type: string + enum: + - interactive + default: interactive + responses: + "200": + description: Success + content: + application/zip: + schema: + type: string + format: binary + description: Documentation ZIP file to download + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + VersionNotFound: + $ref: "#/components/examples/VersionNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/roles": + post: + tags: + - Roles + summary: Create a new role + description: | + Create a new role with the set of available permissions for this role. + + The role may be assigned to the user for the specific package. + + The "read content of public packages" permissions is applied for all roles by default (except the private packages with default role = **none**). 
+ operationId: postRoles + requestBody: + description: Role creation parameters + content: + application/json: + schema: + $ref: "#/components/schemas/RoleCreate" + responses: + "201": + description: Created + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Role" + - type: object + properties: + permissions: + type: array + description: List of permissions applicable to the role. + items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + get: + tags: + - Roles + summary: Get list of existing roles + description: List of all roles and their permissions. + operationId: getRoles + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + roles: + description: List of existing roles. + type: array + items: + allOf: + - $ref: "#/components/schemas/Role" + - type: object + properties: + readOnly: + description: | + A flag that indicates an immutability of a role. readOnly roles cannot be changed or deleted. + type: boolean + default: false + permissions: + type: array + description: List of permissions applicable to the role. 
+ items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "/api/v2/roles/{roleId}": + parameters: + - name: roleId + in: path + required: true + description: Role identifier. + schema: + type: string + pattern: "^[a-z0-9-]" + example: editor + patch: + tags: + - Roles + summary: Update role + description: | + Change role parameters. + + If the parameter is not transmitted in the request, its value remains unchanged. + + Read only roles can't be updated. + operationId: patchRolesId + requestBody: + description: Role update parameters. + content: + application/json: + schema: + type: object + required: + - permissions + properties: + permissions: + description: | + List of role permissions. + Current role permissions will be replaced by the transmitted list. 
+ type: array + items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + responses: + "204": + description: No content + content: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + delete: + tags: + - Roles + summary: Delete role + description: | + Delete a role with its permissions. + + When the role is deleted, all users with that role shall be removed from packages. + + Roles with **readOnly:true** flag can't be deleted. + operationId: deleteRolesId + responses: + "204": + description: No content + content: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/roles/changeOrder": + post: + tags: + - Roles + summary: Update the roles order + description: | + Update the roles order. 
+ + The list of roles will be replaced, all existing roles are required in request (return an error otherwise). + operationId: postRolesChangeOrder + requestBody: + description: Roles list + content: + application/json: + schema: + type: object + description: List of roles. + required: + - roles + properties: + roles: + type: array + items: + type: string + example: [admin, owner, release-manager, editor, viewer, none] + responses: + "204": + description: No content + content: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "/api/v2/permissions": + get: + tags: + - Roles + summary: Get list of permissions + description: List of all permissions. + operationId: getPermissions + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + permissions: + description: List of available permissions. 
+ type: array + items: + type: object + properties: + permission: + $ref: "#/components/schemas/Permission" + name: + type: string + description: Name of the permission + example: Read content of public package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "/api/v2/packages/{packageId}/availableRoles": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Roles + summary: Get list of available roles for package + description: | + List of available roles to change for package and current user (by access token). + operationId: getPackagesIdAvailableRoles + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: id + in: query + description: Login of the user + required: false + schema: + type: string + example: user1221 + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + description: List of available roles. + properties: + roles: + description: List of available roles. + type: array + items: + allOf: + - $ref: "#/components/schemas/Role" + - type: object + properties: + readOnly: + description: | + A flag that indicates an immutability of a role. readOnly roles cannot be changed or deleted. + type: boolean + default: false + permissions: + type: array + description: List of permissions applicable to the role. 
+ items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/members": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Roles + - Users + summary: Add members to the package + description: | + Add new user (one user or multiple users) with a role to the package. + A member may be added to the package if the assigned role is greater than the existing one. 
+ operationId: postPackagesIdMembers + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: Package members assignment parameters + content: + application/json: + schema: + $ref: "#/components/schemas/MemberCreate" + responses: + "201": + description: Created + content: + application/json: + schema: + type: object + properties: + members: + description: List of the package's users with roles + type: array + items: + $ref: "#/components/schemas/Member" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + tags: + - Roles + - Users + summary: Get the package's members list + description: List of all users and their roles, assigned to the particular package + operationId: getPackagesIdMembers + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + members: + description: List of 
the package's users with roles + type: array + items: + $ref: "#/components/schemas/Member" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/members/{userId}": + parameters: + - $ref: "#/components/parameters/packageId" + - name: userId + in: path + required: true + description: Login of the user + schema: + type: string + example: user1221 + patch: + tags: + - Roles + - Users + summary: Package member update + description: | + Change the member parameters on the package + operationId: patchPackagesIdMembersId + security: + - BearerAuth: [] + - api-key: [] + requestBody: + description: Package member update parameters + content: + application/json: + schema: + type: object + required: + - roleId + - action + properties: + roleId: + type: string + description: Unique role identifier. The value is the slug of role name. + example: editor + action: + type: string + description: Name of the action with user role. 
+ enum: + - add + - remove + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + delete: + tags: + - Roles + - Users + summary: Package member delete + description: | + Delete (unassign) the member from the package. Response variants logic: + * 200 - if the user has direct role assigned to the current package AND assignment to the parent package, will be returned his inherited role. + * 204 - if the user has only direct role assigned to the current package, this assignment will be deleted. 
+ operationId: deletePackagesIdMembersId + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + member: + $ref: "#/components/schemas/Member" + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/admins": + post: + x-nc-api-audience: noBWC + tags: + - Roles + summary: Add a system administrator + description: | + Add an existing user as a system administrator. + operationId: postAdmins + requestBody: + content: + application/json: + schema: + type: object + required: + - userId + properties: + userId: + description: Login of the user. + type: string + example: user1221 + responses: + "201": + description: Created + content: + application/json: + schema: + type: object + properties: + admins: + description: List of system administrators. 
+ type: array + items: + $ref: "#/components/schemas/User" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + get: + x-nc-api-audience: noBWC + tags: + - Roles + summary: Get list of system administrators + description: Get list of system administrators. + operationId: getAdmins + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + admins: + description: List of system administrators. + type: array + items: + $ref: "#/components/schemas/User" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/admins/{userId}": + delete: + x-nc-api-audience: noBWC + tags: + - Roles + summary: Delete system administrator + description: Delete a user from the system administrators group. + operationId: deleteAdminsId + parameters: + - name: userId + description: Login of the user. 
+ in: path + required: true + schema: + type: string + example: user1221 + responses: + "204": + description: No content + content: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/sharedFiles": + post: + tags: + - Versions + summary: Share a published file + description: + Create public link for file that can be used to retrieve the file without security restrictions. + The link could be used to embed file content. + operationId: postSharedFiles + requestBody: + description: Parameters of package file sharing + content: + application/json: + schema: + type: object + required: + - packageId + - version + - slug + properties: + packageId: + description: Package unique identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Package version + type: string + example: "2022.3" + slug: + description: File unique string identifier + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + sharedFileId: + type: string + description: Shared file id + pattern: "^[a-z0-9]" + example: ebbcce45 + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + 
schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/sharedFiles/{sharedFileId}": + get: + tags: + - Versions + summary: Get shared file data + description: Get shared file data by public shared link + operationId: getSharedFilesId + security: [{}] + parameters: + - name: sharedFileId + in: path + description: Shared file id + required: true + schema: + type: string + maxLength: 8 + pattern: "^[a-z0-9]" + example: ebbcce45 + responses: + "200": + description: Success + content: + text/plain: + schema: + description: TXT file content (JSON, YAML, MD, TXT). + type: string + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "410": + description: Gone + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/builders/{builderId}/tasks": + parameters: + - $ref: "#/components/parameters/builderId" + post: + tags: + - Publish + - Admin + summary: Assign build task to Builder + description: | + Returns empty response 204 (in case of no free build task to assign) or multipart form (src+config, matching current start build payload) + operationId: postBuilderIdTasks + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Build task assigned + content: + application/zip: + schema: + type: object + properties: + sources/: + type: string + description: | + Folder with source files for publish + 
**Required**, if the files array is filled in config. + format: binary + config.json: + type: object + description: | + Configuration of the source files. + Files or/and Refs are **required**. + required: + - version + - status + properties: + version: + description: Version name for publication. + type: string + example: "2022.3" + previousVersion: + description: Name of the previous published version. + type: string + example: "2022.2" + default: "" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CQSS.CPQ.TMF" + status: + $ref: "#/components/schemas/VersionStatusEnum" + versionLabels: + description: List of version labels. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + files: + description: | + Detailed data about files in sources archive. + Required if no Refs are provided. + type: array + items: + type: object + required: + - fileId + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + publish: + description: Flag, publish the source file or not. + type: boolean + default: true + labels: + description: List of file labels. + type: array + items: + type: string + example: ["TMF"] + blobId: + description: Git blob ID of the file. + type: string + example: a5d45af7 + xApiKind: + description: Custom x-api-kind parameter. + type: string + example: "no-BWC" + refs: + description: | + Detailed data about referenced versions for current package version. + Required if no Files are provided. + type: array + items: + type: object + required: + - refId + - version + - type + properties: + refId: + description: Referenced package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + version: + description: Referenced package version number. 
+ type: string + example: "2022.2@5" + "204": + description: No content + content: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "410": + description: Gone + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/search/{searchLevel}": + parameters: + - name: searchLevel + in: path + required: true + description: | + Level of object for search. + schema: + type: string + enum: + - operations + - documents + - packages + post: + x-nc-api-audience: noBWC + tags: + - Search + summary: "Global search" + description: Global search by text or custom parameters + operationId: postSearch + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + requestBody: + description: Filters for search + content: + application/json: + schema: + type: object + required: + - searchString + title: searchCommonParams + description: Common parameters for Global search + properties: + searchString: + description: Search by common text fields (summary, description, title, etc.). + type: string + example: "Billing account" + packageIds: + description: | + List of Package Id(s). + * If user specified Workspace(s), then Workspace Id(s) must be transmitted. + * If user specified Workspace(s) and Group(s), then Group Id(s) must be transmitted. + * If user specified Workspace(s), Group(s) and Package(s) or Workspace(s) and Package(s), then Package Id(s) must be transmitted. + type: array + items: + type: string + example: ["QS.CloudQSS.CPQ.Q-TMF", "QS.CloudQSS.CPQ.CORE"] + versions: + description: Package version names. 
+ type: array + items: + type: string + example: ["2022.2", "2022.3"] + statuses: + description: List of package version statuses + type: array + items: + $ref: "#/components/schemas/VersionStatusEnum" + creationDateInterval: + description: | + Search interval for the package version publication date. + Both dates are included. + type: object + properties: + startDate: + description: Start date of the search. + type: string + format: date + default: "1970-01-01" + endDate: + description: End date of the search. + type: string + format: date + default: "2050-12-31" + operationParams: + type: object + title: ApiSpecificParams + description: Search parameters specific for particular API type. + required: + - apiType + oneOf: + - type: object + description: | + Search parameters specific for REST API. + These params shall be used only if apiType in search request equals to REST API. + title: SearchRestParams + properties: + apiType: + description: Type of the API + type: string + enum: + - rest + scope: + description: Search scope for operation + type: array + items: + type: string + enum: + - request + - response + detailedScope: + description: Detailed search scope for operation + type: array + items: + type: string + enum: + - properties + - annotation + - examples + methods: + description: Operation method + type: array + items: + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + example: ["post", "get"] + - type: object + description: | + Search parameters specific for GraphQL. + These params shall be used only if apiType in search request equals to GraphQL. 
+ title: SearchGQLParams + properties: + apiType: + description: Type of the API + type: string + enum: + - graphql + scope: + type: array + items: + type: string + enum: + - argument + - property + - annotation + operationTypes: + type: array + items: + type: string + enum: + - query + - mutation + - subscription + examples: {} + required: true + responses: + "200": + description: Success + content: + application/json: + schema: + description: Results of the global search list + type: object + properties: + operations: + type: array + items: + $ref: "#/components/schemas/SearchResultOperation" + documents: + type: array + items: + $ref: "#/components/schemas/SearchResultDocument" + packages: + type: array + items: + $ref: "#/components/schemas/SearchResultPackage" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/search/{searchLevel}": + parameters: + - name: searchLevel + in: path + required: true + description: | + Level of object for search. 
+ schema: + type: string + enum: + - operations + - documents + - packages + post: + deprecated: true + x-nc-api-audience: noBWC + tags: + - Search + summary: "Global search" + description: Global search by text or custom parameters + operationId: postSearchV2 + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + requestBody: + description: Filters for search + content: + application/json: + schema: + type: object + required: + - searchString + title: searchCommonParams + description: Common parameters for Global search + properties: + searchString: + description: Search by common text fields (summary, description, title, etc.). + type: string + example: "Billing account" + packageIds: + description: | + List of Package Id(s). + * If user specified Workspace(s), then Workspace Id(s) must be transmitted. + * If user specified Workspace(s) and Group(s), then Group Id(s) must be transmitted. + * If user specified Workspace(s), Group(s) and Package(s) or Workspace(s) and Package(s), then Package Id(s) must be transmitted. + type: array + items: + type: string + example: ["QS.CloudQSS.CPQ.Q-TMF", "QS.CloudQSS.CPQ.CORE"] + versions: + description: Package version names. + type: array + items: + type: string + example: ["2022.2", "2022.3"] + statuses: + description: List of package version statuses + type: array + items: + description: Package version status + type: string + enum: + - draft + - release + - archived + creationDateInterval: + description: | + Search interval for the package version publication date. + Both dates are included. + type: object + properties: + startDate: + description: Start date of the search. + type: string + format: date + default: "1970-01-01" + endDate: + description: End date of the search. + type: string + format: date + default: "2050-12-31" + operationParams: + type: object + title: ApiSpecificParams + description: Search parameters specific for particular API type. 
+ required: + - apiType + oneOf: + - type: object + description: | + Search parameters specific for REST API. + These params shall be used only if apiType in search request equals to REST API. + title: SearchRestParams + properties: + apiType: + description: Type of the API + type: string + enum: + - rest + scope: + description: Search scope for operation + type: array + items: + type: string + enum: + - request + - response + detailedScope: + description: Detailed search scope for operation + type: array + items: + type: string + enum: + - properties + - annotation + - examples + methods: + description: Operation method + type: array + items: + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + example: ["post", "get"] + - type: object + description: | + Search parameters specific for GraphQL. + These params shall be used only if apiType in search request equals to GraphQL. + title: SearchGQLParams + properties: + apiType: + description: Type of the API + type: string + enum: + - graphql + scope: + type: array + items: + type: string + enum: + - argument + - property + - annotation + examples: {} + required: true + responses: + "200": + description: Success + content: + application/json: + schema: + description: Results of the global search list + type: object + properties: + operations: + type: array + items: + $ref: "#/components/schemas/SearchResultOperationV2" + documents: + type: array + items: + description: Global search result for documents; must be returned when searchLevel = document + title: SearchResultDocument + type: object + required: + - packageId + - name + - parentPackages + - version + - status + - files + - slug + - type + - title + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + parentPackages: + description: 
Array of parent package names + type: array + items: + type: string + version: + description: Package version name. + type: string + example: "2022.2@5" + status: + description: Package version status + type: string + enum: + - draft + - release + - archived + slug: + description: Published document slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - asyncapi-2 + - json-schema + - markdown + - unknown + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + labels: + description: List of documents labels. + type: array + items: + type: string + example: ["TMF"] + createdAt: + description: Date of the package version publication + type: string + format: date-time + content: + type: string + description: | + String with search term occurrences in the document. + If document content does not contain search term, then return N first characters. + If document is empty, then this property will be empty. + packages: + type: array + items: + title: SearchResultPackage + description: | + Global search result for packages with kind = package; must be returned when searchLevel = package + * If search term matches the package id/name/description/service name, return the latest published version only. + * If search term matches the version name/label, return that version. + type: object + required: + - packageId + - name + - parentPackages + - createdAt + - version + - revision + - status + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + description: + description: Package description + type: string + serviceName: + description: Service name that package belongs to. 
Should be equal to service deployment name in kubernetes. + type: string + parentPackages: + description: Array of parent package names + type: array + items: + type: string + version: + description: Package version name. + type: string + example: "2022.2@5" + latestRevision: + description: | + true if revision is the latest one. + type: boolean + default: false + status: + description: Package version status + type: string + enum: + - draft + - release + - archived + createdAt: + description: Date of the package version publication + type: string + format: date-time + labels: + description: List of package version labels + type: array + items: + type: string + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/agents": + get: + tags: + - Agent + summary: Get list of all Agent instances + description: Get a list of clouds on which Agent instances are installed that have registered themselves in APIHUB Registry + operationId: getAgents + parameters: + - name: onlyActive + in: query + description: Defines whether return all Agent instances or only active instances + required: true + schema: + type: boolean + default: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + type: array + description: Array of Agent instances + items: + type: object + 
properties: + agentId: + type: string + description: Id of Agent instance + cloud: + type: string + description: Cloud name where Agent is deployed + namespace: + type: string + description: Cloud namespace where Agent is deployed + url: + type: string + description: The URL to the Agent + lastActive: + type: string + format: date-time + description: the last time when the Agent registered itself in the registry + status: + type: string + description: status of Agent instance + backendVersion: + type: string + description: Backend version of Agent instance + agentVersion: + type: string + description: Version of Agent instance + compatibilityError: + type: object + properties: + severity: + type: string + enum: + - error + - warning + description: Severity of compatibility error + message: + type: string + description: Description of compatibility error + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + post: + x-nc-api-audience: noBWC + tags: + - Agent + summary: Agent registration + description: Agent registration in APIHUB registry + operationId: postAgentRegistration + requestBody: + content: + application/json: + schema: + type: object + required: + - cloud + - namespace + - url + - backendVersion + properties: + cloud: + type: string + description: Cloud name where Agent is deployed + namespace: + type: string + description: Cloud namespace where Agent is deployed + url: + type: string + description: The URL to the Agent + backendVersion: + type: string + 
description: Backend version of Agent instance + agentVersion: + type: string + description: Version of Agent instance + responses: + "200": + content: + application/json: + schema: + type: object + properties: + version: + type: string + description: Expected version of Agents + description: Success + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + '/api/v2/agents/{agentId}/namespaces/{name}/serviceNames': + get: + summary: Get list of service names + description: | + Synchronously get list of service names in selected namespace without discovery. 
+ operationId: getNamespaceServicesNames + tags: + - Agent + parameters: + - $ref: "#/components/parameters/AgentId" + - $ref: "#/components/parameters/Namespace" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + type: object + required: + - serviceNames + properties: + serviceNames: + description: List of the service names in namespace + type: array + items: + type: object + properties: + id: + type: string + description: service id + name: + type: string + description: service name + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "424": + description: Failed dependency + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /agents/{agentId}/namespaces/{name}/services/{serviceId}/proxy/{path}: + get: + summary: Proxy endpoint to service + description: Proxy endpoint to service + operationId: getAgentsIdNamespacesIdServicesProxy + security: + - {} + tags: + - Agent + - TryIt + parameters: + - $ref: "#/components/parameters/AgentId" + - $ref: "#/components/parameters/Namespace" + - $ref: "#/components/parameters/ServiceId" + - name: path + in: path + required: true + schema: + type: string + description: String that includes paths and query params + example: api/v2/escaped/te%20xt/text/text123?escaped=te%20xt + - name: X-Apihub-Authorization + in: header + schema: + type: string + description: | + nc-service authorization: + * if system env INSECURE_PROXY is empty or false, then X-Apihub-Authorization is **required** + * if system env INSECURE_PROXY = true, then X-Apihub-Authorization is optional + responses: + 1XX: + description: Information responses + 
content: + "*/*": + schema: + description: Schema of any type + 2XX: + description: Successful responses + content: + "*/*": + schema: + description: Schema of any type + 3XX: + description: Redirection messages + content: + "*/*": + schema: + description: Schema of any type + 4XX: + description: Client error responses + content: + "*/*": + schema: + description: Schema of any type + 5XX: + description: Server error responses + content: + "*/*": + schema: + description: Schema of any type + "/api/v2/agents/{agentId}/namespaces": + get: + summary: Get Namespace list + description: Get Namespace list from current Cloud. + operationId: getAgentsIdNamespaces + tags: + - Agent + parameters: + - $ref: "#/components/parameters/AgentId" + responses: + "200": + description: Successful operation + content: + application/json: + schema: + type: object + properties: + namespaces: + description: List of cloud Namespaces + type: array + items: + type: string + description: Namespace name + cloudName: + type: string + description: Cloud name + "424": + description: Failed dependency + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "/api/internal/users": + post: + x-nc-api-audience: noBWC + tags: + - Users + - Admin + summary: Create internal user + description: | + Creation of the internal user, not connected to LDAP or GitLab. + * If the userId already exists, return an error. + * One email may be connected to several users at one time, no unique constraint. + * The password will be stored in an encrypted form locally. 
+ operationId: postUsers + security: [{}] + requestBody: + description: User for creation + content: + application/json: + schema: + type: object + required: + - email + - password + properties: + email: + description: Email address of the user + type: string + format: email + example: "name.surname@qubership.org" + name: + description: Name of the user + type: string + example: "Name Surname" + password: + type: string + description: User password. + format: password + privateWorkspaceId: + type: string + description: PackageId for personal private workspace + required: true + responses: + "201": + description: Created + content: + application/json: + schema: + $ref: "#/components/schemas/User" + examples: {} + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/users": + get: + tags: + - Users + summary: Get users list + description: List of all users with detailed info + operationId: getUsers + security: + - BearerAuth: [] + parameters: + - name: textFilter + in: query + description: Filter by userId (login)/name/email address. 
+ schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + users: + description: List of all available users + type: array + items: + $ref: "#/components/schemas/User" + examples: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/users/{userId}": + get: + tags: + - Users + summary: Get user by id + description: User's detailed info + operationId: getUser + parameters: + - name: userId + description: Login of the user + in: path + required: true + schema: + type: string + example: user1221 + security: + - BearerAuth: [ ] + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/User" + examples: { } + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/users/{userId}/profile/avatar": + get: + x-nc-api-audience: noBWC + tags: + - Users + - Admin + summary: Get the user avatar + description: | + 
Get the user avatar. + API returns the data of photo file in a png format. + operationId: getUsersIdProfileAvatar + security: [{}] + parameters: + - name: userId + description: Login of the user + in: path + required: true + schema: + type: string + example: user1221 + responses: + "200": + description: Success + content: + image/png: + schema: + type: string + format: binary + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/ghosts": + get: + tags: + - Versions + summary: Get list of ghost operations for operation group + description: | + Operation is considered a ghost operation if it was included in this group for previous version revision and was deleted in the current version revision + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + - $ref: "#/components/parameters/apiAudience" + - name: textFilter + in: query + description: Filter by title/path/method. 
+ schema: + type: string + - name: documentSlug + in: query + description: Filter by document + schema: + type: string + example: "billing-rating-catalog-integration-service1-json" + - name: tag + in: query + description: | + Name of the tag for filtering/grouping. + A full match is required. To get the list of available tags use GET /tags API. + schema: + type: string + example: RestControllerV5 + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + + In response will be returned the list of operations, on what the tag is not filled in. + + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + in: query + schema: + type: string + enum: + - all + - bwc + - no-bwc + - experimental + default: all + - name: deprecated + description: Filter operations by 'deprecated' status. + in: query + schema: + type: string + enum: + - "all" + - "true" + - "false" + default: all + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. + in: query + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of group ghost operations. + type: object + properties: + ghostOperations: + type: array + description: | + List of grouped operations that were present in the previous revision and are deleted in the current revision. 
+ items: + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - $ref: "#/components/schemas/ProtobufOperation" + - type: object + properties: + packageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: 
"#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/operations": + get: + tags: + - Operations + summary: Get list of operations + description: | + Full list of operations without grouping by parent specification document. + The result list depends on the API type. + operationId: getPackagesIdVersionsIdApiTypeOperations + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiAudience" + - name: skipRefs + in: query + description: | + If false and package has references, then package references (including references to the deleted package versions) shall be resolved. + schema: + type: boolean + default: false + - name: textFilter + in: query + description: "Filter by title/path/method. Custom tag format in search is key: value. Search works as a complete match" + schema: + type: string + - name: documentSlug + in: query + description: Filter by document + schema: + type: string + example: "billing-rating-catalog-integration-service1-json" + - name: tag + in: query + description: | + Name of the tag for filtering/grouping. + A full match is required. To get the list of available tags use GET /tags API. + schema: + type: string + example: RestControllerV5 + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + + In response will be returned the list of operations, on what the tag is not filled in. + + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. 
+ schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + in: query + schema: + type: string + enum: + - all + - bwc + - no-bwc + - experimental + default: all + - name: deprecated + description: Filter operations by 'deprecated' status. + in: query + schema: + type: string + enum: + - "all" + - "true" + - "false" + default: all + - name: includeData + in: query + description: Include the operation's content data. + schema: + type: boolean + default: false + - name: ids + in: query + description: List of the operationId to filter. + schema: + type: array + items: + type: string + example: ["get-quoteManagement-v5-quote", "post-quoteManagement-v5-quote"] + - name: hashList + in: query + description: List of the hash to filter. + schema: + type: array + items: + type: string + example: ["sdfsdfsf242", "fhjfjfhjfhj3212"] + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. + in: query + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of operations. 
+ type: object + properties: + operations: + type: array + items: + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - $ref: "#/components/schemas/ProtobufOperation" + - type: object + properties: + data: + description: | + Content of the operation as a JSON object. + Required, if includeData: true. + type: object + customTags: + description: | + Custom tags. + type: object + packageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + 
application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/deprecated/summary": + get: + tags: + - Versions + summary: Get deprecated operations summary + description: | + Get summary of deprecated operations in the version + operationId: getPackageIdVersionDeprecatedSummaryV3 + security: + - BearerAuth: [ ] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + responses: + "200": + description: Success + content: + application/json: + schema: + oneOf: + - type: object + title: package + description: List of changes data. + required: + - operationTypes + properties: + operationTypes: + type: array + items: + type: object + properties: + apiType: + $ref: "#/components/schemas/ApiType" + deprecatedCount: + description: Total number of deprecated operations in the version + type: string + tags: + type: array + items: + type: string + example: [ "tag1", "tag2" ] + - type: object + title: dashboard + required: + - refs + - packages + properties: + refs: + description: | + Refs to packages, which contains deprecated operations/items + type: array + items: + type: object + properties: + packageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + operationTypes: + type: array + items: + type: object + properties: + apiType: + $ref: "#/components/schemas/ApiType" + deprecatedCount: + description: Total number of deprecated operations in the version + type: string + tags: + type: array + items: + type: string + example: [ "tag1", "tag2" ] + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. 
+ type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/deprecated": + get: + tags: + - Versions + summary: Get list of deprecated operations + description: | + List of deprecated operations in the current version + operationId: getPackagesIdVersionsIdApiTypeDeprecations + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/apiAudience" + - name: includeDeprecatedItems + in: query + description: Include deprecated items inside operation. + schema: + type: boolean + default: false + - name: ids + in: query + description: List of the operationId to filter. 
+ schema: + type: array + items: + type: string + example: [ "get-quoteManagement-v5-quote", "post-quoteManagement-v5-quote" ] + - name: version + in: path + description: | + Package version. + The mask @ may be used for search in a specific revision. + required: true + schema: + type: string + example: "2022.3@3" + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. + in: query + schema: + type: string + - name: textFilter + in: query + description: Filter operation by operation title/path/method. + schema: + type: string + - name: apiKind + description: Filter by API kind + in: query + schema: + type: string + enum: + - bwc + - no-bwc + - experimental + - name: documentSlug + in: query + description: Document unique string identifier + schema: + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + - name: tag + in: query + description: | + A full match is required.\ + Multiple tags separated by comma can be specified. + schema: + type: string + example: "tag1, tag2" + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. 
+ schema: + type: boolean + default: false + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + operations: + description: | + List of discrepancies data of operations in a published version. + The resulting list depends on the API type. + type: array + items: + allOf: + - type: object + properties: + packageRef: + description: > + Parent package and version link. Created by the concatenation of the packageId and version name with At sign. + type: string + example: QS.CloudQSS.CPQ.Q-TMF@2023.2 + operationId: + description: Operation generated unique identifier (slug). + type: string + example: get-quoteManagement-v5-quote + title: + description: Operation summary/title. + type: string + apiKind: + description: Operation API kind + type: string + enum: + - bwc + - no-bwc + - experimental + deprecated: + description: True if the operation itself is deprecated. + type: boolean + deprecatedInPreviousVersions: + description: List of previous release versions where operation was also deprecated + type: array + items: + type: string + example: [ "2022.2", "2022.1", "2021.4" ] + deprecatedCount: + type: string + description: number of deprecated items in the operation + deprecatedInfo: + description: | + Additional information about deprecated operation: + * for REST API it is value of 'x-deprecated-meta' extension (value of extension must be string), which is defined for deprecated operation. + * for GraphQL API it is value of 'reason' argument of @deprecated directive, which is defined for deprecated operation. + type: string + deprecatedItems: + allOf: + - $ref: "#/components/schemas/DeprecatedItems" + - type: object + description: List of deprecated items in the operation. deprecatedItems is required only if includeDeprecatedItems = true + externalMetadata: + description: External operation metadata. 
+ type: object + - oneOf: + - $ref: "#/components/schemas/RestOperationMeta" + - $ref: "#/components/schemas/GraphQLOperationMeta" + - $ref: "#/components/schemas/ProtobufOperationMeta" + packages: + description: > + A mapped list of the packageId and version name + concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/operations/{operationId}/deprecatedItems": + get: + tags: + - Operations + summary: Deprecated items of single operation + description: | + Get list of all deprecated items inside of the single operation.\ + Deprecated item is entity that is deprecated inside of API operation. For example for REST API it can be parameter or schema, for GraphQL API - field or enum value. 
+ operationId: getPackagesIdVersionsApiTypeOperationsIddeprecatedItems + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/operationId" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + deprecatedItems: + $ref: "#/components/schemas/DeprecatedItems" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/operations/{operationId}/models/{modelName}/usages": + get: + tags: + - Operations + summary: List of operations with the same model + description: | + Get list of operations that have the same model + operationId: getPackagesIdVersionsApiTypeOperationsIdModelsModelName + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/operationId" + - $ref: "#/components/parameters/modelName" + responses: + "200": + description: Success + content: + application/json: + schema: + 
type: object + properties: + modelUsages: + description: List of operationIds and model names + type: array + items: + type: object + properties: + operationId: + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + modelNames: + description: List of models with the same hash. + type: array + items: + type: string + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/export/operations": + get: + tags: + - Operations + summary: Export operations to xlsx file + description: | + Export operations of specific API type. + operationId: getPackagesIdVersionsIdApiTypeOperationsExport + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiAudience" + - name: textFilter + in: query + description: Filter by title/path/method. + schema: + type: string + - name: tag + in: query + description: | + Name of the tag for filtering/grouping. + A full match is required. 
To get the list of available tags use GET /tags API. + schema: + type: string + example: RestControllerV5 + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + in: query + schema: + type: string + enum: + - all + - bwc + - no-bwc + - experimental + default: all + - name: group + in: query + description: | + Name of the group for filtering.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + The filter is applied only to the groups of current version. Groups from previous version will be ignored.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. 
+ in: query + schema: + type: string + responses: + "200": + description: Success + content: + application/xlsx: + schema: + type: string + format: binary + description: xlsx file to download + headers: + Content-Disposition: + schema: + type: string + description: xlsx file name + example: attachment; filename="APIOperations_package.id_version.xlsx" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/export/operations/deprecated": + get: + tags: + - Operations + summary: Export deprecated operations to xlsx file + description: | + Export deprecated operations of specific API type. + operationId: getPackagesIdVersionsIdApiTypeOperationsDeprecatedExport + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiAudience" + - name: textFilter + in: query + description: Filter by title/path/method. 
+ schema: + type: string + - name: tag + in: query + description: | + A full match is required.\ + Multiple tags separated by comma can be specified. + schema: + type: string + example: "tag1, tag2" + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + in: query + schema: + type: string + enum: + - all + - bwc + - no-bwc + - experimental + default: all + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + In response will be returned the list of operations, on what the tag is not filled in. + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: group + in: query + description: | + Name of the group for filtering.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. 
+ in: query + schema: + type: string + responses: + "200": + description: Success + content: + application/xlsx: + schema: + type: string + format: binary + description: xlsx file to download + headers: + Content-Disposition: + schema: + type: string + description: xlsx file name + example: attachment; filename="APIOperations_package.id_version.xlsx" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/operations/{operationId}": + get: + tags: + - Operations + summary: Get operation details + description: | + Operation's parameters and data. + The result depends on the API type. 
+ operationId: getPackagesIdVersionsIdApiTypeOperationsId + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/operationId" + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - $ref: "#/components/schemas/ProtobufOperation" + - type: object + required: + - data + properties: + data: + description: Content of the operation as a JSON object. + type: object + customTags: + description: | + Custom tags. + type: object + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/operations/{operationId}/changes": + get: + tags: + - Changes + - Operations + summary: Single operation change log + description: | + Get changes of one operation between current and previous published package version. 
+ The result depends on the API type. + operationId: getPackagesIdVersionsApiTypeOperationsIdChanges + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/operationId" + - $ref: "#/components/parameters/severity" + - in: query + name: previousVersion + schema: + type: string + description: If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used. + example: "2022.2" + - in: query + name: previousVersionPackageId + schema: + type: string + description: If both previousVersion and previousVersionPackageId are not specified, then previous **release** version will be used. + example: QS.RUNENV.K8S-SERVER.CJM-QSS-DEV-2.Q-TMF + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + changes: + description: List of discrepancies data in the operation. 
+ type: array + items: + $ref: "#/components/schemas/SingleOperationChange" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/tags": + get: + tags: + - Operations + summary: Get list of operations tags + description: | + Get list of operations tags in one published version. + The result list depends on the API type. + operationId: getPackagesIdVersionsIdApiTypeTags + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiAudience" + - name: textFilter + in: query + description: | + Filter by tag. + Partial name is applicable. + schema: + type: string + - name: skipRefs + in: query + description: | + If false and package has references, then package references (including references to the deleted package versions) shall be resolved. + schema: + type: boolean + default: false + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. 
Usage is not recommended. + in: query + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - name: group + in: query + description: | + Name of the group for filtering.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: string + example: v1 + - name: emptyGroup + in: query + description: | + Flag for filtering operations without a group.\ + Either "group" or "emptyGroup" (= true) can be sent in the request, if both of them are specified then 400 will be returned in the response. + schema: + type: boolean + default: false + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of tags. + type: object + properties: + tags: + type: array + items: + type: string + example: ["TMF"] + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/groups": + post: + deprecated: true + x-deprecation-reason: New 
version of API is created - POST /api/v3/packages/{packageId}/versions/{version}/{apiType}/groups + tags: + - Versions + - Operation groups + summary: Create operation group + description: | + Create operation group.\ + It is needed to be able to distinguish between groups created manually and groups created automatically via restPrefixGrouping. + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + requestBody: + description: New version group + content: + application/json: + schema: + type: object + required: + - groupName + - apiType + properties: + groupName: + description: Name of new group. Name must be unique within one API type. + type: string + description: + description: Description of created group. + type: string + responses: + "201": + description: Created + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/groups": + post: + tags: + - Versions + - Operation groups + 
summary: Create manual operation group + description: | + Create manual operation group.\ + Manual groups can be created for both packages and dashboards. One group can contain operations of one API type only. + operationId: PostPackageIdVersionApiTypeGroupsV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + requestBody: + description: New version group + content: + multipart/form-data: + schema: + type: object + required: + - groupName + properties: + groupName: + description: Name of new group. Name must be unique within one API type. + type: string + description: + description: Description of created group. + type: string + template: + description: | + OpenAPI specification template that will be used to export operations from an operation group with buildType = mergedSpecification.\ + Template can only be specified for the group with apiType = rest.\ + Both YAML and JSON file formats are supported. 
+ type: string + format: binary + responses: + "201": + description: Created + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}": + get: + tags: + - Versions + - Operation groups + summary: Get list of operations for operation group + description: | + Get list of operations from operation group. The result list depends on the API type. + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + - $ref: "#/components/parameters/apiAudience" + - name: textFilter + in: query + description: Filter by title/path/method. + schema: + type: string + - name: documentSlug + in: query + description: Filter by document + schema: + type: string + example: "billing-rating-catalog-integration-service1-json" + - name: tag + in: query + description: | + Name of the tag for filtering/grouping. + A full match is required. 
To get the list of available tags use GET /tags API. + schema: + type: string + example: RestControllerV5 + - name: emptyTag + in: query + description: | + Flag, filtering the operations without tags at all. + + In response will be returned the list of operations, on what the tag is not filled in. + + This attribute has a higher priority than the **tag**. In case, then **emptyTag: true**, it will override the **tag** filter. + schema: + type: boolean + default: false + - name: onlyAddable + in: query + description: | + Flag for filtering operations that are not included in this group + + true - will return all operations from version except operations that are already included in this group + + false - will return all operations that are included in this group + schema: + type: boolean + default: false + - name: kind + description: | + Operation kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + in: query + schema: + type: string + enum: + - all + - bwc + - no-bwc + - experimental + default: all + - name: deprecated + description: Filter operations by 'deprecated' status. + in: query + schema: + type: string + enum: + - "all" + - "true" + - "false" + default: all + - name: refPackageId + description: Filter by package id of ref package, shall be used in case of dashboard. + in: query + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of group operations. + type: object + properties: + operations: + type: array + description: | + List of grouped operations that were present in the previous revision and are deleted in the current revision. 
+ items: + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - $ref: "#/components/schemas/ProtobufOperation" + - type: object + properties: + packageRef: + description: | + Parent package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/PackageVersionRef" + - type: object + properties: + versionRevision: + description: Number of the revision. + type: integer + format: int32 + example: 3 + parentPackages: + description: List of parent package names. + type: array + items: + type: string + example: ["qubership", "Qubership JSS", "Sample Management"] + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + refId: "QS.CloudQSS.CPQ.Q-TMF" + kind: "package" + name: "Quote Management TMF648" + version: "2023.2" + status: "release" + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + versionRevision: 3 + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + 
description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + put: + deprecated: true + x-deprecation-reason: New version of API is created - PUT /api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName} + tags: + - Versions + - Operation groups + summary: Update operation group + description: | + Update operation group.\ + It is prohibited to update the prefix groups, only manual groups can be updated. + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + requestBody: + description: Version group update parameters + content: + application/json: + schema: + type: object + required: + - groupName + properties: + groupName: + description: Name of the group. + type: string + description: + description: Description of version group. + type: string + operations: + type: array + description: One group can contain no more than 200 operations. + items: + type: object + required: + - operationId + properties: + packageId: + description: | + ID of package.\ + PackageId and version shall be specified in case of dashboard to identify source package. + If packageId and version are not specified, this will mean that the source of the operation is the current package version. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version and revision of the package. + type: string + example: "2023.3@3" + operationId: + description: Operation unique identifier. 
+ type: string + example: get-quoteManagement-v5-quote + responses: + "200": + description: Successful + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + patch: + deprecated: true + x-deprecation-reason: New version of API is created - PATCH /api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName} + tags: + - Versions + - Operation groups + summary: Update parameters of operation group + description: | + Update parameters of operation group (groupName in path).\ + It is prohibited to update the prefix groups, only manual groups can be updated. + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: groupName + description: Old groupName + in: path + required: true + schema: + type: string + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + requestBody: + description: Version group update parameters + content: + application/json: + schema: + type: object + properties: + groupName: + description: Name of the group. + type: string + description: + description: Description of version group. 
+ type: string + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + delete: + tags: + - Versions + - Operation groups + summary: Delete operation group + description: | + Delete version group. 
+ operationId: deletePackagesIdVersionsIdApiTypeGroup + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}": + patch: + tags: + - Versions + - Operation groups + summary: Update parameters of operation group + description: | + Update parameters of operations group. 
+ operationId: patchPackageIdVersionApiTypeGroupName + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: groupName + description: Old groupName + in: path + required: true + schema: + type: string + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + requestBody: + description: Version group update parameters + content: + multipart/form-data: + schema: + type: object + properties: + groupName: + description: | + Name of new group. Name must be unique within one API type.\ + Group name can be changed only for manual group. + type: string + description: + description: Description of the operation group (can be update for the manual or rest path prefix group) + type: string + template: + description: | + OpenAPI specification template that will be used to export operations from an operation group with buildType = mergedSpecification.\ + Template can be specified for the manual or rest path prefix group, but only with apiType = rest.\ + Both YAML and JSON file formats are supported. + type: string + format: binary + operations: + type: array + description: Operations in the group. One group can contain no more than 200 operations. + items: + type: object + required: + - operationId + properties: + packageId: + description: | + ID of package.\ + PackageId and version shall be specified in case of dashboard to identify source package. + If packageId and version are not specified, this will mean that the source of the operation is the current package version. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version and revision of the package. + type: string + example: "2023.3@3" + operationId: + description: Operation unique identifier. 
+ type: string + example: get-quoteManagement-v5-quote + responses: + "204": + description: No content + content: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + put: + tags: + - Versions + - Operation groups + deprecated: true + x-deprecation-reason: operations object was added in the requestBody of PATCH /api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}. Therefore, current operation is no longer required. + summary: Update list of operations in the manual group + operationId: putPackageIdVersionApiTypeGroupName + description: | + Update operations in the group. Only operations in the manual group can be changed. + It is prohibited to update operation in the rest path prefix group, only manual groups can be updated. + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: apiType + description: Type of the API. 
+ in: path + required: true + schema: + type: string + enum: + - rest + - kafka + - grpc + - graphql + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + requestBody: + description: Version group update parameters + content: + multipart/form-data: + schema: + type: object + required: + - groupName + properties: + groupName: + description: Name of the group. + type: string + description: + description: Description of version group. + type: string + template: + description: | + OpenAPI specification template that will be used to export operations from an operation group with buildType = mergedSpecification.\ + Template can only be specified for the group with apiType = rest.\ + Both YAML and JSON file formats are supported. + type: string + format: binary + operations: + type: array + description: Operations in the group. One group can contain no more than 200 operations. + items: + type: object + required: + - operationId + properties: + packageId: + description: | + ID of package.\ + PackageId and version shall be specified in case of dashboard to identify source package. + If packageId and version are not specified, this will mean that the source of the operation is the current package version. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version and revision of the package. + type: string + example: "2023.3@3" + operationId: + description: Operation unique identifier. 
+ type: string + example: get-quoteManagement-v5-quote + responses: + "200": + description: Successful + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v1/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/template": + get: + tags: + - Versions + - Operation groups + summary: Get export template of operation group + description: | + Export OpenAPI specification template from an operation group (manual or rest path prefix). + This feature is supported only for apiType = rest. 
+ operationId: getPackageIdVersionApiTypeGroupNameTemplate + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/apiType" + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/groupName" + responses: + "200": + description: Success + content: + application/octet-stream: + schema: + type: string + format: binary + description: template + headers: + Content-Disposition: + schema: + type: string + description: file name + example: attachment; filename=".xlsx" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/packages/{packageId}/calculateGroups: + get: + tags: + - Versions + summary: Calculate groups by restGroupingPrefix + description: | + Calculate groups by transmitted restGroupingPrefix. + This is in-flight calculation, i.e. calculated groups will not be saved. 
+ operationId: getPackagesIdCalculateGroups + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/packageId" + - name: groupingPrefix + in: query + description: Regular expression used as criteria for grouping operations. + schema: + type: string + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of groups. + type: object + properties: + groups: + description: Operation groups calculated by groupingPrefix. + type: array + items: + type: string + example: ["v1", "v2"] + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /api/v2/packages/{packageId}/recalculateGroups: + post: + tags: + - Versions + summary: Recalculate package version groups + description: | + Recalculate package groups by specified restGroupingPrefix on the package + operationId: postPackagesIdRecalculateGroups + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/packageId" + responses: + "200": + description: Success + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + 
"401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/documents": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + get: + tags: + - Documents + - Versions + summary: Get version documents + description: | + Get list of documents in a version. The result depend on the package.kind: + * For package.kind: package - return the list of version documents. + * For package.kind: dashboard - return the list of all referenced dashboards and their referenced packages in recursion. + The returned list will contain only leaves - referenced packages of the lowest level with their published documents. + operationId: getPackagesIdVersionsIdDocuments + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/apiTypeQueryParam" + - name: skipRefs + in: query + description: | + If false and package has references, then package references (including references to the deleted package versions) shall be resolved. + schema: + type: boolean + default: false + - name: textFilter + in: query + description: Filter by document title. + schema: + type: string + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of documents in a package. 
+ type: object + properties: + documents: + type: array + items: + allOf: + - $ref: "#/components/schemas/PackageVersionFile" + - type: object + properties: + packageRef: + description: | + Parent package and version link. Created by the concatenation of the packageId and version name with At sign. + type: string + example: QS.CloudQSS.CPQ.Q-TMF@2023.2 + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/documents/{slug}": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + get: + tags: + - Documents + 
summary: Get document details + description: Get the published content object's details by ID. + operationId: getPackagesIdVersionsIdDocumentsSlugV2 + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/PackageVersionFile" + - type: object + properties: + description: + description: Document description. + type: string + info: + description: Info object from openapi document + type: object + externalDocs: + description: External documentation object from openapi document + type: object + operations: + description: List of the operations in a file without operation's data. + type: array + items: + oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - $ref: "#/components/schemas/ProtobufOperation" + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/documents/{slug}": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/slug" + get: + deprecated: true + tags: + - Documents + summary: Get document details + 
description: Get the published content object's details by ID. + operationId: getPackagesIdVersionsIdDocumentsSlug + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + allOf: + - description: Parameters of published file in package version + type: object + title: Package version file + required: + - fileId + - filename + - slug + - type + - format + - title + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + filename: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - asyncapi-2 + - json-schema + - markdown + - graphql-schema + - graphapi + - introspection + - unknown + format: + description: Format of the specification document. + type: string + enum: + - json + - yaml + - md + - graphql + - gql + - unknown + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + version: + description: Document version + type: string + example: "1.0.1" + labels: + description: List of file labels. + type: array + items: + type: string + example: ["TMF"] + - type: object + properties: + description: + description: Document description. + type: string + info: + description: Info object from openapi document + type: object + externalDocs: + description: External documentation object from openapi document + type: object + operations: + description: List of the operations in a file without operation's data. + type: array + items: + allOf: + - $ref: "#/components/schemas/Operation" + - type: object + required: + - metadata + - searchScopes + properties: + metadata: + description: Metadata content depend on apiType. 
+ oneOf: + - $ref: "#/components/schemas/RestOperationMeta" + - $ref: "#/components/schemas/GraphQLOperationMeta" + examples: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/transformation/documents": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + get: + deprecated: true + x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/documents + tags: + - Documents + - Versions + summary: Get version documents for transformation to openAPI + description: | + Get list of documents for integration by buildType - documentGroup + operationId: getPackagesIdVersionsIdTransformationDocuments + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of documents in a package. 
+ type: object + properties: + documents: + type: array + items: + allOf: + - description: Parameters of published file in package version + type: object + title: Package version file + required: + - fileId + - filename + - slug + - type + - format + - title + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + filename: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - asyncapi-2 + - json-schema + - markdown + - graphql-schema + - graphapi + - introspection + - unknown + format: + description: Format of the specification document. + type: string + enum: + - json + - yaml + - md + - graphql + - gql + - unknown + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + version: + description: Document version + type: string + example: "1.0.1" + labels: + description: List of file labels. 
+ type: array + items: + type: string + example: ["TMF"] + description: + description: Document description + type: string + includedOperationIds: + description: List of operation IDs included to specified operation group + type: array + items: + type: string + example: [ + "get-quoteManagement-v5-quote", + "post-quoteManagement-v5-quote" + ] + data: + description: Content of the operation as a JSON object + type: object + - type: object + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/documents": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - $ref: "#/components/parameters/apiType" + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + get: + tags: + - Documents + - Versions + summary: Get documents of operations from operation group. + description: | + Get list of package version documents of operations from operation group. 
+ operationId: getPackagesIdVersionsIdTransformationDocumentsV3 + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of documents in a package. + type: object + properties: + documents: + type: array + items: + allOf: + - $ref: "#/components/schemas/PackageTransformationFile" + - type: object + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/publish": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: apiType + description: Type of the API. + in: path + required: true + schema: + type: string + enum: + - rest + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + post: + tags: + - Operation groups + - Publish + summary: Start operation group publication + description: | + Start operation group publish process.\ + In this process all operations from operation group will be published to the selected package version. 
+ operationId: postOperationGroupPublish + security: + - BearerAuth: [] + - api-key: [] + requestBody: + content: + application/json: + schema: + type: object + required: + - packageId + - version + - status + properties: + packageId: + description: Package unique identifier (full alias). + type: string + example: QS.CQSS.CPQ.TMF + version: + description: Version name for publication in package. + type: string + example: "2022.3" + previousVersion: + description: Name of the previous published version in package. + type: string + example: "2022.2" + previousVersionPackageId: + description: Package id of the previous version. The parameter may be empty if the value is equal to the packageId. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + status: + $ref: "#/components/schemas/VersionStatusEnum" + versionLabels: + description: List of version labels in package. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + responses: + "202": + description: Publish process started + content: + application/json: + schema: + type: object + properties: + publishId: + type: string + description: Publish process Id + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: 
"#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/{apiType}/groups/{groupName}/publish/{publishId}/status": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: apiType + description: Type of the API. + in: path + required: true + schema: + type: string + enum: + - rest + - name: groupName + in: path + required: true + description: Name of the operation group + schema: + type: string + - name: publishId + description: Publish Id + in: path + required: true + schema: + type: string + format: uuid + example: 9c8e9045-dd9c-4946-b9e4-e05e3f41c4cc + get: + tags: + - Operation groups + - Publish + summary: Get operation group publication status + description: | + Get operation group publish status. + operationId: getOperationGroupPublishStatus + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + status: + description: Publish process status. + type: string + enum: + - running + - error + - complete + - none + message: + description: The message for **error** status. 
+ type: string + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/references": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + get: + deprecated: true + tags: + - Documents + - Versions + summary: Get version references + description: | + Get list of version referenced packages (lowest level by recursion, not having refs in a published version). + operationId: getPackagesIdVersionsIdReferences + security: + - BearerAuth: [] + - api-key: [] + parameters: + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + - name: textFilter + in: query + description: Filter by package name. + schema: + type: string + - name: kind + in: query + description: Filter the packages by kind. + schema: + type: string + enum: + - package + - dashboard + example: "package" + - name: showAllDescendants + in: query + description: | + Show all the descendants to the parent workspace or group. + If ```true```, return the list of all child packages/dashboards to the current package (take into account all other filter parameters). 
+ schema: + type: boolean + default: false + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + references: + description: List of nested packages from reference. + type: array + items: + allOf: + - description: Package version reference + type: object + title: Referenced package version + required: + - refId + - name + - version + - status + properties: + refId: + description: Referenced package Id + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + kind: + description: Package kind + type: string + enum: + - package + - dashboard + name: + description: Name of the referenced package + type: string + example: "Quote Management TMF648" + version: + description: Referenced package version number. The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + deletedAt: + description: Date when the package version was deleted + type: string + format: date-time + example: "2023-05-30T17:17:11.755146Z" + deletedBy: + description: User who deleted the package version + type: string + example: "user1221" + - type: object + properties: + parents: + description: List of all parent packages + type: array + items: + description: Base package object for parents list. 
+ type: object + title: PackageList + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + alias: + type: string + description: Package short alias (abbreviation) + maxLength: 10 + pattern: "^[a-zA-Z0-9-_]" + parentId: + description: Parent package ID + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + name: + description: Name of the package + type: string + imageUrl: + type: string + description: Path to the package icon + format: URL + examples: {} + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/references": + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + get: + tags: + - Documents + - Versions + summary: Get version references + description: | + Get flat list of all version references + operationId: getPackagesIdVersionsIdReferencesv3 + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of references. + type: object + properties: + references: + type: array + items: + type: object + properties: + packageRef: + description: | + Referenced package and version link. 
+ Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + parentPackageRef: + description: | + Parent referenced package and version link. + Created by the concatenation of the packageId and version name with At sign. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + excluded: + description: All excluded refs will be ignored (but will still be visible for package version). + type: boolean + packages: + description: | + A mapped list of the packageId and version name concatenation with At sign to the package objects. + type: object + additionalProperties: + allOf: + - $ref: "#/components/schemas/ReferencedPackage" + - type: object + example: + QS.CloudQSS.CPQ.Q-TMF@2023.2: + refId: QS.CloudQSS.CPQ.Q-TMF + kind: package + name: Quote Management TMF648 + version: "2022.2@5" + status: release + parentPackages: ["qubership", "Qubership JSS", "Sample Management"] + deletedAt: "2023-05-30T17:17:11.755146Z" + deletedBy: "user1221" + notLatestRevision: true + examples: {} + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: {} + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/{version}/revisions": + get: + tags: + - Versions + deprecated: true + 
x-deprecation-reason: New version of API is created - GET /api/v3/packages/{packageId}/versions/{version}/revisions. + summary: Get the version revisions list + description: | + Get the list of version revisions. + operationId: getPackagesIdVersionsIdRevisions + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: textFilter + in: query + description: Filter by label|user. + schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + revisions: + description: List of version revisions. + type: array + items: + description: Version revision parameters. + type: object + title: PackageVersionRevision + required: + - version + - revision + - status + - createdAt + - createdBy + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "2023.1@5" + revision: + description: Number of the revision. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + createdBy: + $ref: "#/components/schemas/User" + createdAt: + type: string + description: Date of revision creation. + format: datetime + notLatestRevision: + type: boolean + default: false + revisionLabels: + description: List of revision labels. + type: array + items: + type: string + example: [ "part-of:CloudQSS-CPQBE" ] + publishMeta: + additionalProperties: true + description: Publish metadata. + type: object + properties: + commitId: + description: Last Git commit ID of the version. + type: string + example: a5d45af7 + repositoryUrl: + description: Url of the Git repository. + type: string + format: URI + example: https:///apihub-registry + cloudName: + description: Name of the cloud for publication from Agent. 
+ type: string + example: k8s-apps2 + cloudUrl: + description: Full address of the cloud from Agent. + type: string + format: URI + example: https://k8s-apps2.k8s.sdntest.qubership.org + namespace: + description: Namespace of Agent's publication. + type: string + example: cloudQSS-release2 + examples: { } + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v3/packages/{packageId}/versions/{version}/revisions": + get: + tags: + - Versions + summary: Get the version revisions list + description: | + Get the list of version revisions. + operationId: getPackagesIdVersionsIdRevisionsV3 + security: + - BearerAuth: [ ] + - api-key: [ ] + parameters: + - $ref: "#/components/parameters/packageId" + - $ref: "#/components/parameters/version" + - name: textFilter + in: query + description: Filter by label|user|meta. 
+ schema: + type: string + - $ref: "#/components/parameters/limit" + - $ref: "#/components/parameters/page" + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + revisions: + description: List of version revisions. + type: array + items: + $ref: "#/components/schemas/PackageVersionRevision" + examples: { } + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current ednpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: { } + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/versions/recursiveDelete": + parameters: + - $ref: "#/components/parameters/packageId" + post: + tags: + - Versions + summary: Delete expired versions recursively + description: Recursively delete all expired versions in 'Draft' status under group or workspace + operationId: deleteExpiredVersionsRecursively + security: + - BearerAuth: [ ] + - api-key: [ ] + requestBody: + description: Recursive delete parameters + content: + application/json: + schema: + type: object + properties: + olderThanDate: + description: Versions published earlier than the date are going to be 
deleted. + type: string + format: 'date' + example: '2023-05-12' + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + jobId: + description: Id of job deleting versions. + type: string + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/packages/{packageId}/publish/availableStatuses": + parameters: + - $ref: "#/components/parameters/packageId" + get: + tags: + - Publish + - Packages + summary: Get a list of available publish statuses for the package + description: | + Get a list of available publish statuses for the package. + List depends on the current user access rights. + operationId: getPackagesIdAvailableStatuses + security: + - BearerAuth: [] + - api-key: [] + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + properties: + statuses: + description: List of available statuses. 
+ type: array + items: + $ref: "#/components/schemas/VersionStatusEnum" + "301": + description: Moved Permanently + headers: + Location: + schema: + type: string + description: Current endpoint with new packageId of moved package + X-New-Package-Id: + schema: + type: string + description: New packageId of moved package + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + PackageNotFound: + $ref: "#/components/examples/PackageNotFound" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/space": + get: + tags: + - Users + summary: Get user's private workspace + description: Get personal private workspace of the current user. 
+ operationId: getSpace + security: + - BearerAuth: [] + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/Package" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + post: + tags: + - Users + summary: Create own personal private package + description: | + Any user is able to create one personal private workspace (*no permissions required*) + PackageId for this workspace could be set via user creation endpoint (only for new users) or generated automatically when user first logs in to apihub + operationId: postspace + security: + - BearerAuth: [] + responses: + "201": + description: Created + content: + application/json: + schema: + $ref: "#/components/schemas/Package" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/users/{userId}/space": + post: + 
tags: + - Users + summary: Create personal private package for the user + description: | + Any user is able to create one personal private workspace (*no permissions required*) + + PackageId for this workspace could be set via user creation endpoint (only for new users) or generated automatically when user first logs in to apihub + operationId: postUsersIdAvailablePackagePromoteStatuses + security: + - BearerAuth: [] + parameters: + - name: userId + description: Login of the user + in: path + required: true + schema: + type: string + example: user1221 + responses: + "201": + description: Created + content: + application/json: + schema: + $ref: "#/components/schemas/Package" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v2/users/{userId}/availablePackagePromoteStatuses": + post: + x-nc-api-audience: noBWC + tags: + - Publish + - Users + summary: Get list of available package publish statuses for the user. + description: | + By collection of packages, **get** the available statuses for publish into for the particular user. + + Statuses list depends on the current user access rights for the particular package. + + The response is a **mapped list** of statuses to the packageIds. 
+ operationId: getUsersIdAvailablePackagePromoteStatuses + security: + - BearerAuth: [] + parameters: + - name: userId + description: Login of the user + in: path + required: true + schema: + type: string + example: user1221 + requestBody: + description: Packages list to check. + content: + application/json: + schema: + type: object + required: + - packages + properties: + packages: + description: PackageIds list. + type: array + items: + type: string + example: ["QS.CloudQSS.CPQ.Q-TMF","QS.CloudQSS.CPQ.QE-SRV"] + required: true + responses: + "200": + description: Success + content: + application/json: + schema: + type: object + description: List of packages and available statuses. + additionalProperties: + type: array + description: Mapped list of statuses to the packageId. + items: + type: string + enum: + - draft + - release + example: + QS.CloudQSS.CPQ.Q-TMF: + - draft + - release + QS.CloudQSS.CPQ.QE-SRV: + - draft + '400': + description: default response + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + /playground/proxy: + get: + summary: Proxy endpoint for try it in case of non-cloud environments. + description: Allows to send any request to any address. 
+ operationId: getAgentsIdNamespacesIdServicesProxy + security: + - { } + tags: + - TryIt + parameters: + - name: X-Apihub-Proxy-Url + in: header + required: true + schema: + type: string + description: | + Full URL that includes paths and query params + example: http://127.0.0.1:8080/api/v2/escaped/te%20xt/text/text123?escaped=te%20xt + - name: X-Apihub-Authorization + in: header + required: true + schema: + type: string + description: | + The header is a replacement for Authorization header, because original Authorization header should be passed to the target service. + responses: + 1XX: + description: Information responses + content: + "*/*": + schema: + description: Schema of any type + 2XX: + description: Successful responses + content: + "*/*": + schema: + description: Schema of any type + 3XX: + description: Redirection messages + content: + "*/*": + schema: + description: Schema of any type + 4XX: + description: Client error responses + content: + "*/*": + schema: + description: Schema of any type + 5XX: + description: Server error responses + content: + "*/*": + schema: + description: Schema of any type + "/api/v2/businessMetrics": + get: + tags: + - Admin + summary: Get business metrics report + description: | + Returns collected business metrics + operationId: getBusinessMetrics + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: format + description: Response format + in: query + required: false + schema: + type: string + default: json + enum: + - json + - xlsx + - name: parentPackageId + description: Return business metrics only for specific group\workspace + in: query + required: false + schema: + type: string + example: SD + - name: hierarchyLevel + description: Number of hierarchy levels for grouping packages (level=0 - packageId="SD.TL.TLQSS", level=1 - packageId="SD", level=2 - packageId="SD.TL") + in: query + required: false + schema: + type: integer + default: 0 + responses: + "200": + description: Success + content: + 
application/json: + schema: + description: List of collected business metrics grouped by package + type: array + items: + $ref: "#/components/schemas/BusinessMetric" + application/octet-stream: + schema: + type: string + format: binary + description: xlsx file to download + headers: + Content-Disposition: + schema: + type: string + description: xlsx file name (only filled when format=xlsx) + example: attachment; filename="business_metrics_2023-12-31.xlsx" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "404": + description: Not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" + "/api/v1/publishHistory": + get: + tags: + - Admin + summary: Get history of published version revisions + description: | + Returns all existing version revisions sorted by publish date from oldest to newest. + Only for system administrators. 
+ operationId: getPublishHistory + x-api-kind: no-BWC + security: + - BearerAuth: [] + - api-key: [] + parameters: + - name: status + description: version status + in: query + required: false + schema: + $ref: "#/components/schemas/VersionStatusEnum" + - name: publishedBefore + description: Filter to include only revisions published before specific date + in: query + required: false + schema: + type: string + format: date-time + description: RFC3339 format + example: "2024-03-15T12:00:13.052715Z" + - name: publishedAfter + description: Filter to include only revisions published after specific date + in: query + required: false + schema: + type: string + format: date-time + description: RFC3339 format + example: "2024-03-15T12:00:13.052715Z" + responses: + "200": + description: Success + content: + application/json: + schema: + description: List of published versions + type: array + items: + type: object + properties: + packageId: + type: string + version: + type: string + revision: + type: integer + previousVersionPackageId: + type: string + previousVersion: + type: string + publishedAt: + type: string + format: date-time + apiTypes: + type: array + items: + $ref: "#/components/schemas/ApiType" + "400": + description: Bad request + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + IncorrectInputParams: + $ref: "#/components/examples/IncorrectInputParameters" + "401": + description: Unauthorized + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "403": + description: Forbidden + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "500": + description: Internal Server Error + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + examples: + InternalServerError: + $ref: "#/components/examples/InternalServerError" +components: + parameters: + apiAudience: + name: apiAudience + in: query + description: | + Filter 
operations by apiAudience. + * internal - APIs are available for integration within product application. + * external - APIs exposed outside the boundary of the product application: solution delivery integrations, 3rd party integrations, customer integrations. + * unknown - If any value other than internal or external is used, the API is considered as unknown. + schema: + type: string + enum: + - all + - internal + - external + - unknown + default: all + AgentId: + name: agentId + in: path + description: Id of Agent instance + required: true + schema: + type: string + example: K8S-SERVER_api-hub-dev + Namespace: + name: name + description: Cloud Namespace name + in: path + required: true + schema: + type: string + severity: + name: severity + in: query + description: Filter API changes by severity. + schema: + type: array + items: + type: string + enum: + - breaking + - non-breaking + - deprecated + - semi-breaking + - annotation + - unclassified + ServiceId: + name: serviceId + description: Cloud service identifier + in: path + required: true + schema: + type: string + apiType: + name: apiType + description: Type of the API. + in: path + required: true + schema: + type: string + enum: + - rest + - graphql + - protobuf + apiTypeQueryParam: + name: apiType + in: query + description: | + Filter documents by type of the API. 
+ schema: + type: string + enum: + - rest + - graphql + - protobuf + gitType: + name: gitType + description: Integration type + in: path + required: true + schema: + type: string + enum: + - gitLab + default: gitLab + repositoryId: + name: repositoryId + schema: + type: string + description: Repository Id + in: path + required: true + groupName: + name: groupName + in: path + description: Version Group + required: true + schema: + type: string + builderId: + name: builderId + in: path + description: Builder unique identifier (UUID) + required: true + schema: + type: string + format: UUID + example: QS.CloudQSS.CPQ.Q-TMF + packageId: + name: packageId + in: path + description: Package unique identifier (full alias) + required: true + schema: + type: string + example: QS.CloudQSS.CPQ.Q-TMF + version: + name: version + in: path + description: Package version + required: true + schema: + type: string + example: "2022.3" + slug: + name: slug + in: path + description: File unique string identifier + required: true + schema: + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + limit: + name: limit + in: query + description: Requested number of resources to be provided in response. + schema: + type: number + default: 100 + maximum: 100 + minimum: 1 + page: + name: page + in: query + description: Page number + schema: + type: number + default: 0 + showParents: + name: showParents + in: query + description: Show/hide the list of parent packages. + schema: + type: boolean + default: false + operationId: + name: operationId + in: path + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. 
+ required: true + schema: + type: string + example: "get-quoteManagement-v5-quote" + modelName: + name: modelName + in: path + description: Unique model identifier for operation + required: true + schema: + type: string + example: "CreateItemDto" + schemas: + AuthResponse: + description: Auth response + type: object + properties: + token: + description: Bearer token + type: string + user: + $ref: "#/components/schemas/User" + required: + - token + ApiKey: + title: ApiKey + type: object + required: + - id + - name + properties: + id: + description: ApiKey unique identifier + type: string + name: + description: ApiKey name + type: string + DeprecatedItems: + description: List of deprecated items in the operation. + type: array + items: + type: object + properties: + deprecatedInPreviousVersions: + description: List of previous release versions where item was also deprecated + type: array + items: + type: string + example: [ "2022.2", "2022.1", "2021.4" ] + declarationJsonPaths: + description: Declarative path to deprecated item. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["paths", "/post/saml/", "test", 1], ["paths", "/post/saml/", "test", 22]] + description: + description: Human-readable description of deprecated item. + type: string + example: "[Deprecated] query parameter 'petId'" + deprecatedInfo: + description: | + Additional information about single deprecated item: + * for REST API it is value of 'x-deprecated-meta' extension (value of extension must be string), which is defined for deprecated item. + * for GraphQL API it is value of 'reason' argument of @deprecated directive. + type: string + tolerantHash: + description: Tolerant hash for Schema object or Parameter object that has been deprecated. It is needed to identify that the same schema/parameter was deprecated in previous version.
+ type: string + hash: + description: Hash (full) for Schema object or Parameter object that has been deprecated, it is needed to detect semi-breaking changes in UI. + type: string + ApiType: + title: apiType + type: string + enum: + - rest + - graphql + - protobuf + PackageCreate: + description: Parameters for the package creation + required: + - alias + - name + - kind + type: object + properties: + parentId: + description: Parent package ID + type: string + kind: + description: | + Package kind. + * If kind = workspace, the parentId will be ignored. + * If kind = group or package or dashboard, the parentId is required. + type: string + enum: + - workspace + - group + - package + - dashboard + name: + description: Name of the new package + type: string + alias: + description: Package short alias (abbreviation) + type: string + maxLength: 10 + pattern: "^[a-zA-Z0-9-_]" + description: + description: Common description of the package + type: string + serviceName: + description: | + Service name that package belongs to. Should be equal to service deployment name in kubernetes. + Ignored for Workspace and Group kind. + type: string + imageUrl: + deprecated: true + x-deprecation-reason: UI for logo is deprecated. imageUrl parameter will be deleted in 2025.1 + type: string + description: Path to the package icon + format: URL + defaultRole: + description: Default role of the package. + type: string + releaseVersionPattern: + description: Release version mask. Value shall be inherited from parent group with the ability to override it. + type: string + excludeFromSearch: + description: | + If true, the package (including child packages) will be ignored by global search. + Changing the value of the parent package will change the value of all child packages. + A child package cannot have a negative value if the parent package has a positive value. The default value for a newly created package is equal to the value from the parent package. 
+ type: boolean + Package: + description: Simple package object, without content and dependencies + type: object + title: Package + required: + - packageId + - alias + - kind + - name + - isFavorite + - defaultRole + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + alias: + type: string + description: Package short alias (abbreviation) + maxLength: 10 + pattern: "^[a-zA-Z0-9-_]" + parentId: + description: Parent package ID + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + name: + description: Name of the package + type: string + description: + description: Common description of the package + type: string + isFavorite: + description: Sign of the favorite package for the caller user + type: boolean + default: false + serviceName: + description: Service name that package belongs to. Should be equal to service deployment name in kubernetes. + type: string + imageUrl: + deprecated: true + x-deprecation-reason: UI for logo is deprecated. imageUrl parameter will be deleted in 2025.1 + type: string + description: Path to the package icon + format: URL + permissions: + type: array + description: List of user permissions applicable to the package. + items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + defaultRole: + description: Default role of the package. + type: string + releaseVersionPattern: + description: Release version mask. Value shall be inherited from parent group with the ability to override it. + type: string + excludeFromSearch: + description: | + If true, the package (including child packages) will be ignored by global search. + Changing the value of the parent package will change the value of all child packages. + A child package cannot have a negative value if the parent package has a positive value. 
The default value for a newly created package is equal to the value from the parent package. + type: boolean + restGroupingPrefix: + description: | + Regular expression used as criteria for grouping operations. + Groups for the package version are calculated during publication of this version. + type: string + example: "/api/v1/{group}/" + PackageList: + description: Base package object for parents list. + type: object + title: PackageList + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + alias: + type: string + description: Package short alias (abbreviation) + maxLength: 10 + pattern: "^[a-zA-Z0-9-_]" + parentId: + description: Parent package ID + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + - package + - dashboard + name: + description: Name of the package + type: string + imageUrl: + deprecated: true + x-deprecation-reason: UI for logo is deprecated. imageUrl parameter will be deleted in 2025.1 + type: string + description: Path to the package icon + format: URL + PackageUpdate: + description: Parameters for the package update. + Not changed parameters must not be transmitted. + Parameters, required in creation, must not be empty if transmitted. + type: object + properties: + name: + description: Name of the package + type: string + description: + description: Common description of the package + type: string + serviceName: + description: | + Service name that package belongs to. Should be equal to service deployment name in kubernetes. + Parameter may be changed (filled in) only it was empty in creation. Otherwise, the 400 error will be returned. + type: string + imageUrl: + deprecated: true + x-deprecation-reason: UI for logo is deprecated. imageUrl parameter will be deleted in 2025.1 + type: string + description: Path to the package icon + format: URL + defaultRole: + description: Default role of the package. 
+ type: string + defaultReleaseVersion: + description: | + Default release version for the package. + Only `release` version may be placed as default. Return the error otherwise. + type: string + example: "2023.1" + releaseVersionPattern: + description: Release version mask. Value shall be inherited from parent group with the ability to override it. + type: string + excludeFromSearch: + description: | + If true, the package (including child packages) will be ignored by global search. + Changing the value of the parent package will change the value of all child packages. + A child package cannot have a negative value if the parent package has a positive value. The default value for a newly created package is equal to the value from the parent package. + type: boolean + restGroupingPrefix: + description: Regular expression used as criteria for grouping operations. + type: string + GitBranch: + description: External Git branch params + type: object + properties: + name: + description: Branch name + type: string + GitBranchFile: + description: Git branch file + type: object + properties: + name: + description: item name + type: string + isFolder: + description: Is item is folder + type: boolean + default: false + Repository: + description: Linked GIT repository params + type: object + properties: + repositoryId: + description: External GIT repository id + type: string + name: + description: External GIT repository full name + type: string + defaultBranch: + description: Name of the default branch of the linked repository + type: string + PackageStatus: + description: Published package version content + type: object + properties: + status: + type: string + enum: + - exist + - deleted + VersionStatusEnum: + description: Package version status + type: string + enum: + - draft + - release + - archived + User: + description: APIHUB user + type: object + required: + - id + properties: + id: + description: Login of the user + type: string + example: user1221 + name: + 
description: Name of the user + type: string + example: "Name Surname" + email: + description: Email address of the user + type: string + format: email + example: "name.surname@qubership.org" + avatarUrl: + description: Avatar of the user + type: string + format: URL + RoleCreate: + description: Role with set of permissions for creation. + type: object + title: Role + required: + - role + - permissions + properties: + role: + type: string + pattern: "^[a-zA-Z0-9- ]" + description: Role name. + example: Editor + permissions: + type: array + description: List of permissions applicable to the role. + items: + $ref: "#/components/schemas/Permission" + example: ["read", "create_and_update_package", "delete_package"] + Role: + description: Role details. + type: object + title: Role + required: + - roleId + - role + - permissions + properties: + roleId: + type: string + description: Unique role identifier. The value is the slug of role name. + example: editor + role: + type: string + description: Role name. + example: Editor + Permission: + description: Permission + type: string + enum: + - read + - create_and_update_package + - delete_package + - manage_draft_version + - manage_release_version + - manage_archived_version + - user_access_management + - access_token_management + example: read + Member: + description: User and assigned role + type: object + title: Member + required: + - user + - roles + properties: + user: + $ref: "#/components/schemas/User" + roles: + type: array + description: List of user roles in the package. 
+ items: + allOf: + - $ref: "#/components/schemas/Role" + - type: object + properties: + inheritance: + type: object + description: Role was inherited from this package + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + kind: + description: Package kind + type: string + enum: + - workspace + - group + name: + description: Name of the package + type: string + example: qubership + MemberCreate: + description: Assign users and role to the package + type: object + title: MemberCreate + required: + - emails + - roleIds + properties: + emails: + description: List of email addresses of the users to create. + type: array + items: + type: string + format: email + example: ["name.surname@qubership.org"] + roleIds: + type: array + description: List of role IDs, added to the user. + items: + type: string + example: [owner, editor, viewer, none] + PackageVersion: + description: Base parameters of published version (without content) + type: object + title: PackageVersion + required: + - version + - status + - createdAt + - createdBy + properties: + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "2022.3@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + createdBy: + $ref: "#/components/schemas/Principal" + createdAt: + type: string + description: Date of package creation. + format: datetime + versionLabels: + description: List of version labels. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + previousVersion: + description: previous package version name.The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Package id of the previous version. Can be empty if the value is equal to the package id. 
+ type: string + example: "QS.GRP.SOMEPKG" + notLatestRevision: + type: boolean + default: false + PackageVersionContentV2: + deprecated: true + description: Published package version content + type: object + required: + - packageId + - version + - createdAt + - createdBy + - summary + - revision + - revisionsCount + - status + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Package version name.The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + createdAt: + description: Date of the package version publication. + type: string + format: date-time + createdBy: + description: User, published the package version. + type: string + previousVersion: + description: Name of the previous published version.The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Package id of the previous version to compare with. Required for agent snapshots. + type: string + example: "QS.GRP.SOMEPKG" + versionLabels: + description: List of version labels. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + notLatestRevision: + type: boolean + default: false + revisionsCount: + description: Total number of revisions in the version. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + PackageVersionContent: + description: Published package version content + type: object + required: + - packageId + - version + - createdAt + - createdBy + - summary + - revision + - revisionsCount + - status + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Package version name.The @ mask is used to return the revision number. 
+ type: string + example: "2022.2@5" + createdAt: + description: Date of the package version publication. + type: string + format: date-time + createdBy: + $ref: "#/components/schemas/Principal" + previousVersion: + description: Name of the previous published version.The @ mask is used to return the revision number. + type: string + example: "2022.2@5" + previousVersionPackageId: + description: Package id of the previous version to compare with. Required for agent snapshots. + type: string + example: "QS.GRP.SOMEPKG" + versionLabels: + description: List of version labels. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + notLatestRevision: + type: boolean + default: false + revisionsCount: + description: Total number of revisions in the version. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + PackageVersionFile: + description: Parameters of published file in package version + type: object + title: Package version file + required: + - fileId + - filename + - slug + - type + - format + - title + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + filename: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + $ref: "#/components/schemas/SpecificationType" + format: + $ref: "#/components/schemas/DocumentFormat" + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + version: + description: Document version + type: string + example: "1.0.1" + labels: + description: List of file labels. 
+ type: array + items: + type: string + example: ["TMF"] + PackageTransformationFile: + description: Parameters of published file in package version + type: object + title: Package version file + required: + - fileId + - filename + - slug + - type + - format + - title + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + filename: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + $ref: "#/components/schemas/SpecificationType" + format: + $ref: "#/components/schemas/DocumentFormat" + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + version: + description: Document version + type: string + example: "1.0.1" + labels: + description: List of file labels. + type: array + items: + type: string + example: ["TMF"] + description: + description: Document description + type: string + includedOperationIds: + description: List of operation IDs included to specified operation group + type: array + items: + type: string + example: [ + "get-quoteManagement-v5-quote", + "post-quoteManagement-v5-quote" + ] + data: + description: Content of the operation as a JSON object + type: object + PackageVersionRef: + description: Package version reference + type: object + title: Referenced package version + required: + - refId + - name + - version + - status + properties: + refId: + description: Referenced package Id + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + kind: + description: Package kind + type: string + enum: + - package + - dashboard + name: + description: Name of the referenced package + type: string + example: "Quote Management TMF648" + version: + description: Referenced package version number. The @ mask is used to return the revision number. 
+ type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + deletedAt: + description: Date when the package version was deleted + type: string + format: date-time + example: "2023-05-30T17:17:11.755146Z" + deletedBy: + description: User who deleted the package version + type: string + example: "user1221" + Principal: + description: User or API key + allOf: + - oneOf: + - $ref: "#/components/schemas/User" + - $ref: "#/components/schemas/ApiKey" + - type: object + required: + - type + properties: + type: + description: Identifies whether principal is user or API key + type: string + enum: + - user + - apiKey + ReferencedPackage: + description: Package reference + type: object + title: Referenced package + required: + - refId + - kind + - name + - version + - status + properties: + refId: + description: Referenced package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + kind: + description: Package kind + type: string + enum: + - package + - dashboard + name: + description: Name of the referenced package + type: string + example: "Quote Management TMF648" + version: + description: Referenced package version number. The @ mask is used to return the revision number.
+ type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + parentPackages: + description: Array of parent package names + type: array + items: + type: string + deletedAt: + description: Date when the package version was deleted + type: string + format: date-time + example: "2023-05-30T17:17:11.755146Z" + deletedBy: + description: User who deleted the package version + type: string + example: "user1221" + notLatestRevision: + type: boolean + default: false + SearchResultOperation: + description: | + Global search result for API operations; must be returned when searchLevel = operation + title: SearchResultOperation + allOf: + - oneOf: + - $ref: "#/components/schemas/RestOperation" + - $ref: "#/components/schemas/GraphQLOperation" + - type: object + required: + - packageId + - name + - parentPackages + - version + - status + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + parentPackages: + description: Array of parent package names + type: array + items: + type: string + version: + description: Package version name + type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + SearchResultOperationV2: + description: | + Global search result for API operations; must be returned when searchLevel = operation + title: SearchResultOperationV2 + type: object + deprecated: true + required: + - packageId + - name + - parentPackages + - version + - status + - operationId + - title + - metadata + - apiType + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + parentPackages: + description: Array of parent package names + type: array + items: + type: string + 
version: + description: Package version name + type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + operationId: + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + title: + description: Operation summary/title. + type: string + deprecated: + description: Operation deprecation flag. + type: boolean + default: false + apiType: + type: string + enum: + - rest + - graphql + metadata: + description: Parameters for specific API type + oneOf: + - type: object + title: RestParams + description: Rest API specific parameters + required: + - path + - method + properties: + path: + description: Operation endpoint path. + type: string + example: "/quoteManagement/v5/quote" + method: + description: Operation method. + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + - type: object + title: GraphQLParams + description: GraphQL specific parameters + required: + - type + - method + properties: + type: + description: Operation type + type: string + enum: + - query + - mutation + method: + description: GraphQL operation method. + type: string + example: getPaymentMethodSpecificationCore + SearchResultPackage: + title: SearchResultPackage + description: | + Global search result for packages with kind = package; must be returned when searchLevel = package + * If search term matches the package id/name/description/service name, return the latest published version only. + * If search term matches the version name/label, return that version.
+ type: object + required: + - packageId + - name + - parentPackages + - createdAt + - version + - revision + - status + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + description: + description: Package description + type: string + serviceName: + description: Service name that package belongs to. Should be equal to service deployment name in kubernetes. + type: string + parentPackages: + description: Array of parent package names + type: array + items: + type: string + version: + description: Package version name. + type: string + example: "2022.2@5" + latestRevision: + description: | + true if revision is the latest one. + type: boolean + default: false + status: + $ref: "#/components/schemas/VersionStatusEnum" + createdAt: + description: Date of the package version publication + type: string + format: date-time + labels: + description: List of package version labels + type: array + items: + type: string + SearchResultDocument: + description: Global search result for documents; must be returned when searchLevel = document + title: SearchResultDocument + type: object + required: + - packageId + - name + - parentPackages + - version + - status + - files + - slug + - type + - title + properties: + packageId: + description: Package unique string identifier (full alias) + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + name: + description: Package name + type: string + example: "Quote TMF Service" + parentPackages: + description: Array of parent package names + type: array + items: + type: string + version: + description: Package version name. 
+ type: string + example: "2022.2@5" + status: + $ref: "#/components/schemas/VersionStatusEnum" + slug: + description: Published document slug + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - json-schema + - markdown + - unknown + title: + description: Name/title of the document. + type: string + example: "Quote Integration TMForum Service" + labels: + description: List of documents labels. + type: array + items: + type: string + example: ["TMF"] + createdAt: + description: Date of the package version publication + type: string + format: date-time + content: + type: string + description: | + String with search term occurrences in the document. + If document content does not contain search term, then return N first characters. + If document is empty, then this property will be empty. + ErrorResponse: + description: An error description + type: object + properties: + status: + description: HTTP Status Code + type: number + code: + description: Internal string error code. Mandatory in response. + type: string + message: + description: The attribute contains an error message. + type: string + params: + type: object + description: Message parameters + example: + id: 12345 + type: string + debug: + description: The attribute contains debug details (e.g. stack-trace). Presented in the error response only on Dev/Test environments if corresponding logging level is enabled. 
+ type: string + required: + - status + - code + - message + PackageApiKey: + type: object + description: ApiKey details for the package + title: PackageApiKey + required: + - id + - name + - createdBy + - createdAt + - roles + properties: + id: + description: ApiKey unique identifier + type: string + packageId: + description: Internal unique package ID (full alias) + type: string + name: + description: ApiKey name + type: string + createdBy: + $ref: "#/components/schemas/User" + createdFor: + $ref: "#/components/schemas/User" + createdAt: + description: Date and time of ApiKey creation + type: string + format: datetime + roles: + description: List of roles. + type: array + items: + type: string + SpecificationType: + title: type + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - json-schema + - markdown + - graphql-schema + - graphapi + - introspection + - protobuf-3 + - unknown + DocumentFormat: + title: format + description: Format of the specification document. + type: string + enum: + - json + - yaml + - md + - graphql + - gql + - proto + - unknown + Operation: + description: Operation object + title: Operation + type: object + required: + - operationId + - title + - apiType + - dataHash + - apiAudience + - apiKind + properties: + operationId: + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + title: + description: Operation summary/title. + type: string + apiType: + # description: Type of the API. + # type: string + # enum: + # - rest + # - kafka + # - grpc + # - graphql + # - protobuf + $ref: "#/components/schemas/ApiType" + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + externalMetadata: + description: External operation metadata. + type: object + deprecated: + description: Operation deprecate flag. 
+ type: boolean + default: false + apiAudience: + description: | + Operation's target audience. + * internal - APIs are available for integration within product application. + * external - APIs exposed outside the boundary of product application: solution delivery integrations, 3rd party integrations, customer integrations. + * unknown - If any value other than internal or external is used, the API is considered as unknown. + type: string + enum: + - internal + - external + - unknown + apiKind: + description: | + Operation API kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + type: string + enum: + - bwc + - no-bwc + - experimental + default: bwc + tags: + description: | + List of operation tags. + * in rest, tag is OpenAPI tag. + * in graphql, tag is root schema type - query, mutation, subscription. + * in protobuf, tag is service of method. + type: array + items: + type: string + example: ["RestControllerV5"] + OperationGroup: + description: Operation object + title: Operation + type: object + required: + - operationId + - title + - path + - method + - tags + - packageRef + - apiKind + properties: + operationId: + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + title: + description: Operation summary/title. + type: string + path: + description: Operation endpoint path. + type: string + example: "/quoteManagement/v5/quote" + method: + description: Operation method. + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + tags: + description: List of operation tags. + type: array + items: + type: string + example: ["RestControllerV5"] + packageRef: + description: Parent package and version link. 
+ type: string + example: "QS.CloudQSS.CPQ.Q-TMF@2023.2" + apiKind: + description: | + Operation API kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + type: string + enum: + - bwc + - no-bwc + - experimental + default: bwc + RestOperation: + description: REST operation object. + title: RestOperation + allOf: + - $ref: "#/components/schemas/Operation" + - $ref: "#/components/schemas/RestOperationMeta" + ProtobufOperation: + description: Protobuf operation object. + title: ProtobufOperation + allOf: + - $ref: "#/components/schemas/Operation" + - $ref: "#/components/schemas/ProtobufOperationMeta" + GraphQLOperation: + description: GraphQL operation object. + title: GraphQLOperation + allOf: + - $ref: "#/components/schemas/Operation" + - $ref: "#/components/schemas/GraphQLOperationMeta" + RestOperationMeta: + description: Specific parameters for REST operation. + title: RestOperationMeta + required: + - path + - method + type: object + properties: + path: + description: Operation endpoint path. + type: string + example: "/quoteManagement/v5/quote" + method: + description: Operation method. + type: string + enum: + - post + - get + - put + - patch + - delete + - head + - options + - connect + - trace + title: + description: Operation summary/title. + type: string + customTags: + description: Custom tags. + type: object + GraphQLOperationMeta: + description: Specific parameters for GraphQL operation. + title: GraphQLOperationMeta + required: + - type + - method + type: object + properties: + type: + description: Operation type + type: string + enum: + - query + - mutation + - subscription + method: + description: GraphQL operation method. + type: string + example: getPaymentMethodSpecificationCore + title: + description: Operation summary/title. + type: string + customTags: + description: Custom tags. 
+ type: object + ProtobufOperationMeta: + description: Specific parameters for Protobuf operation. + title: ProtobufOperationMeta + required: + - type + - method + type: object + properties: + type: + description: Operation type + type: string + enum: #open questions what types shall be supported? + - unary + - serverStreaming + - clientStreaming + - bidirectionalStreaming + method: + description: Protobuf method name. + type: string + example: ListActionLogItems + title: + description: Operation title (same as method name but with spaces added between capital letters) + type: string + example: List Action Log Items + OperationInfoFromDifferentVersions: + description: Operation info from previous/current version. + type: object + required: + - title + - apiKind + - dataHash + - apiAudience + properties: + title: + description: Operation summary/title. + type: string + apiKind: + type: string + enum: + - bwc + - no-bwc + - experimental + apiAudience: + description: | + Operation's target audience: + * internal - APIs are available for integration within product application. + * external - APIs exposed outside the boundary of product application: solution delivery integrations, 3rd party integrations, customer integrations. + * unknown - If any value other than internal or external is used, the API is considered as unknown. + type: string + enum: + - internal + - external + - unknown + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + packageRef: + description: > + Parent package and version link. Created by + the concatenation of the packageId and + version name with At sign. + type: string + example: QS.CloudQSS.CPQ.Q-TMF@2023.2 + BusinessMetric: + description: Business metric + title: BusinessMetric + required: + - packageId + - date + - metric + - value + type: object + properties: + packageId: + description: Package unique string identifier + type: string + date: + description: Date on which business metric was collected. 
+ type: string + format: date + example: '2023-12-31' + username: + description: Name of the user that has been counted for a specific metric. + type: string + metric: + description: Name of business metric. + type: string + example: comparisons_called + value: + description: Value associated with business metric. + type: integer + BuildResultV2: + description: | + Result of build operation in ZIP archive. + * The archive contains folders with sources, built JSON documents and JSON operation files. + * In addition, several config files are provided. The structure of these files is described below. + title: BuildResult + type: object + required: + - info.json + - documents.json + properties: + info.json: + description: | + Build configuration, published version details. + type: object + required: + - packageId + - version + properties: + packageId: + description: Package unique string identifier (full alias). + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version name for publication. + type: string + example: "2022.3" + previousVersion: + description: | + Name of the previous published version. + The mask @ may be used to return the revision number. + type: string + example: "2022.2@4" + default: "" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + buildType: + description: | + Type of the build process. Available options are: + + **build** - Standard build process. + Consists of contract and operations build and validation, calculation of the changelog, creation of the final version of the published contracts. + + **changelog** - Only the changelog calculation, no API contracts version will be created. + Both version and previousVersion fields *must* contain @ suffix when using changelog buildType + + **documentGroup** - Process to transform documents so that they will contain operations only from specific operations group. 
+ Consists of info.json and documents.json + type: string + enum: + - build + - changelog + - documentGroup + metadata: + description: Common publish metadata. + type: object + properties: + commitId: + description: Last Git commit ID of the version. + type: string + example: a5d45af7 + repositoryUrl: + description: Url of the Git repository. + type: string + format: URI + example: 'https:///apihub-registry' + labels: + description: List of version labels. + type: array + items: + type: string + example: ["TMF"] + cloudName: + description: Name of the cloud for publication from Agent. + type: string + example: 'k8s-apps2' + cloudUrl: + description: Full address of the cloud from Agent. + type: string + format: URI + example: 'https://k8s-apps2.k8s.sdntest.qubership.org' + namespace: + description: Namespace of Agent's publication. + type: string + example: 'cloudQSS-release2' + refs: + description: | + Detailed data about referenced versions for current package version. + type: array + items: + type: object + required: + - refId + - version + - type + properties: + refId: + description: Referenced package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + version: + description: Referenced package version number. + type: string + example: "2022.3@5" + parentRefId: + description: Referenced parent package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + parentVersion: + description: | + Referenced parent version. + The mask @ may be used to link with a specific revision. + If the @revision is not provided, the latest version's revision will be used. + type: string + example: "2022.2@4" + excluded: + description: All excluded refs will be ignored (but will still be visible for package version). 
+ type: boolean + externalMetadata: + description: External build result metadata + type: object + properties: + operations: + type: array + items: + type: object + properties: + apiType: + type: string + enum: + - rest + method: + type: string + description: HTTP method + path: + type: string + description: HTTP path + externalMetadata: + description: External operation metadata + type: object + documents.json: + type: object + description: List of documents data. + properties: + documents: + type: array + items: + description: List of builded files. + type: object + required: + - fileId + - slug + - title + - format + - type + - operations + properties: + fileId: + type: string + description: File name with folders. + example: "docs/qitmf-v5.11.json" + fileName: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug. + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + description: Type of the specification notation. + type: string + enum: + - openapi-3-1 + - openapi-3-0 + - openapi-2-0 + - asyncapi-2 + - json-schema + - markdown + - graphql-schema + - graphapi + - introspection + - unknown + format: + description: Format of the specification document. + type: string + enum: + - json + - yaml + - md + - graphql + - gql + - unknown + title: + description: Document title and summary. + type: string + description: + description: Document description. + type: string + version: + description: Document version. For openapi document, version is taken from info object. + type: string + example: "1.0.1" + operationIds: + description: List of operation IDs. + type: array + items: + type: string + example: ["get-quoteManagement-v5-quote"] + metadata: + description: Meta parameters from config file. 
+ type: object + properties: + info: + description: Info object from openapi document + type: object + externalDocs: + description: External documentation object from openapi document + type: object + labels: + description: List of file labels. + type: array + items: + type: string + example: ["TMF"] + blobId: + description: Git blob ID of the file. + type: string + example: a5d84af7 + tags: + type: object + properties: + name: + type: string + description: + type: string + externalDocs: + type: object + properties: + description: + type: string + url: + type: string + comparisons.json: + type: object + properties: + comparisons: + type: array + items: + type: object + required: + - operationTypes + properties: + comparisonFileId: + type: string + description: Pointer to the file with changes. Optional, required only if package contains documents. Not required in case of refs only (dashboard). + example: "comparisonFile1.json" + packageId: + description: Package unique string identifier (full alias). + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version name for publication. + type: string + example: "2022.3" + revision: + type: number + previousVersion: + description: | + Name of the previous published version. + type: string + example: "2022.2" + default: "" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + previousVersionRevision: + type: number + operationTypes: + type: array + items: + type: object + properties: + apiType: + type: string + changesSummary: + $ref: "#/components/schemas/ChangeSummary" + tags: + type: array + items: + type: string + example: + ["a", "b"] + fromCache: + description: True if the changes were retrieved from BE (not calculated) + type: boolean + comparisons/{comparisonFileId}: + type: object + description: List of changes data. + properties: + operations: + description: Collection of changes depend on apiType. 
+ type: array + items: + type: object + required: + - operationId + - changes + - changesSummary + properties: + operationId: + description: >- + Operation unique identifier (slug). Not the same as + operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + previousDataHash: + description: Hash of the previous published version operation. + type: string + example: sdfsdfsf24dds2 + changeSummary: + $ref: "#/components/schemas/ChangeSummary" + changes: + description: Collection of changes for each operation. + type: array + items: + type: object + description: Discrepancy data in a single operation. + properties: + jsonPath: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a jsonPath is calculated for each change. jsonPath is the location of a specific change in the changed document. + type: array + items: + type: string + example: ["paths", "/quoteManagement/v5/quote", "get", "parameters", "0", "description"] + description: + description: >- + Human-readable description of point of change. + type: string + example: "[ Added ] Property: summary." + action: + description: Action, what was done with the endpoint. + type: string + enum: + - add + - remove + - change + severity: + $ref: "#/components/schemas/ChangeSeverity" + operations.json: + description: List of operations for publish. + type: object + properties: + operations: + type: array + items: + allOf: + - description: Operation object + title: Operation + type: object + required: + - operationId + - title + - apiType + - dataHash + - apiKind + properties: + operationId: + description: Operation unique identifier (slug). Not the same as operationId tag from the OpenAPI file. 
+ type: string + example: get-quoteManagement-v5-quote + title: + description: Operation summary/title. + type: string + apiType: + description: Type of the API. + type: string + enum: + - rest + - kafka + - grpc + - graphql + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + deprecated: + description: Operation deprecate flag. + type: boolean + default: false + apiKind: + description: | + Operation API kind. + * bwc - API with backward compatibility support (a.k.a. public). + * no-bwc - API without backward compatibility support (a.k.a. internal). + * experimental - APIs for feature testing. Usage is not recommended. + type: string + enum: + - bwc + - no-bwc + - experimental + default: bwc + tags: + description: List of operation tags. + type: array + items: + type: string + example: ["RestControllerV5"] + - type: object + required: + - data + - metadata + - searchScopes + properties: + models: + description: Map of operation models + type: object + additionalProperties: + type: string + example: + Template: a6b1596abe745a4a0cd3de25a411dbe671a3ba7c + ErrorResponse: cf1d87cf3895af106a3ad29f2098dcc2cc477e71 + CreateItemDto: 72bfdc14758213e5c09620b29054da86b75d4868 + deprecatedInfo: + description: Additional information about deprecated operation + type: object + deprecatedInPreviousVersions: + description: List of previous release versions where operation was also deprecated + type: array + items: + type: string + example: ["2022.2", "2022,1", "2021.4"] + deprecatedItems: + description: List of deprecated items inside operation. + type: array + items: + type: object + properties: + jsonPath: + description: jsonPath to point of deprecation. + type: array + items: + type: string + example: ["paths", "/quoteManagement/v5/quote", "get", "parameters", "0", "description"] + description: + description: Human-readable description of deprecated item. 
+ type: string + example: "parameter Type in response 200" + deprecatedInfo: + description: Additional information about single deprecated item + type: object + deprecatedInPreviousVersions: + description: List of previous release versions where item was also deprecated + type: array + items: + type: string + example: ["2022.2", "2022,1", "2021.4"] + metadata: + description: Metadata content depend on apiType. + oneOf: + - allOf: + - $ref: "#/components/schemas/RestOperationMeta" + - type: object + properties: + originalPath: + description: "Operation path as it presented in initial documentation file" + type: string + - $ref: "#/components/schemas/GraphQLOperationMeta" + searchScopes: + description: Scopes for search indexes. + type: object + additionalProperties: + type: string + example: + all: "Quote Snapshot V6 Performs full text search ofQuotes Statistics By Phrase fullTextSearchStatistics searchPhrase offset int32 limit int32 fields 200 OK application/json quote anonymousCustomerData assignTo The name of the user/team to whom the Quote is assigned cancellationReason The entity that represents common information about a customer's request cancellationReasonDescription The reason (from the free text field) why the Quote has been cancelled contactMedium A list of contact mediums (ContactMedium [*]). 
Indicates the contact medium that could be used to contact the party deliveryMethod Id of the Delivery Method (from the Delivery Method list) for Quote description Description of the Quote extendedParameters additionalProperties The attribute contains additional parameters of the Quote The attribute contains additional parameters of the Quote Map> The attribute contains additional parameters of the Quote externalRefs Represents a reference to an external object id Unique identifier of the Quote name Quote name quoteDate Date and time when the Quote was created date-time quoteTotalPrice Represents Quote total price relatedParty A list of related party references (RelatedPartyRef [*]).A related party defines party or party role linked to a specific entity relatedQuote A list of related quotes (RelatedQuote [*]). It represents another revision of the in-flight Quote from the same chain, i.e. in-flight Quote with the same initialBaselineQuoteId value. This class is returned in the API response only in case of enhancedListQuotes API operation is called. It can be empty if there are no other in-flight Quotes in this chain revision Attribute is used to mark in-flight Quote. Increased in case changes are done to the Quote after submission int32 state State of the Quote updatedWhen Date and time when the Quote was updated date-time validFor Quote with reduced scope of attributes quoteItemStatistic Shows the number of Quote's root Quote Items grouped by some criteria(e.g. by Product Offering, Action) action Numbers of root Quote Items grouped by Action count Number of Quote Items for the specific value int32 name Action name Shows the number of Customer's Product Instances for specified criteria (e.g. Product Family, Billing Account and so on) productFamilyId Unique identifier of Product Family productFamilyName Product family name. Name is returned in the default locale. 
If alternative locale is passed in Accept-language header in the API request the system returns corresponding localized name in the response. productOfferingId Unique identifier of Product Offering productOfferingName Product offering name. Attribute is returned in the response for catalog entities only (product offering). Name is returned in the default locale. If alternative locale is passed in Accept-language header in the API request the system returns corresponding localized name in the response Shows the number of Quote's root Quote Items grouped by some criteria(e.g. by Product Offering, Action) Shows the Quote with a number of root Quote Items 4XX default response application/json;charset=UTF-8;model=errorResponse;version=1 5XX default response application/json;charset=UTF-8;model=errorResponse;version=1 " + BuildResult: + description: | + Result of build operation in ZIP archive. + * The archive contains folders with sources, builded JSON documents and JSON operation files. + * In addition, several config files are provided. he structure of these files is described below. + title: BuildResult + type: object + required: + - info.json + - documents.json + properties: + info.json: + description: | + Build configuration, published version details. + type: object + required: + - packageId + - version + - buildType + properties: + packageId: + description: Package unique string identifier (full alias). + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version name for publication. + type: string + example: "2022.3" + previousVersion: + description: | + Name of the previous published version. + The mask @ may be used to return the revision number. + type: string + example: "2022.2@4" + default: "" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + buildType: + description: | + Type of the build process. 
Available options are: + - **build** - Standard build process. Consists of contract and operations build and validation, calculation of the changelog, creation of the final version of the published contracts. + - **changelog** - Only the changelog calculation, no API contracts version will be created. Both version and previousVersion fields *must* contain @ suffix when using changelog buildType + - **reducedSourceSpecifications** - process that finds source specifications for all operations from operation group and removes from these specifications operations other than those that are included into operation group. + - **mergedSpecification** - process that merges all operations from an operation group into one specification. + type: string + enum: + - build + - changelog + - mergedSpecification + - reducedSourceSpecifications + format: + description: Should be specified if buildType is equal to mergedSpecification or reducedSourceSpecifications + type: string + enum: + - yaml + - json + - html + default: json + metadata: + description: Common publish metadata. + type: object + properties: + commitId: + description: Last Git commit ID of the version. + type: string + example: a5d45af7 + repositoryUrl: + description: Url of the Git repository. + type: string + format: URI + example: 'https:///apihub-registry' + labels: + description: List of version labels. + type: array + items: + type: string + example: ["TMF"] + cloudName: + description: Name of the cloud for publication from Agent. + type: string + example: 'k8s-apps2' + cloudUrl: + description: Full address of the cloud from Agent. + type: string + format: URI + example: "https://k8s-apps2.k8s.sdntest.qubership.org" + namespace: + description: Namespace of Agent's publication. + type: string + example: "cloudQSS-release2" + refs: + description: | + Detailed data about referenced versions for current package version. 
+ type: array + items: + type: object + required: + - refId + - version + - type + properties: + refId: + description: Referenced package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + version: + description: Referenced package version number. + type: string + example: "2022.3@5" + parentRefId: + description: Referenced parent package Id. + type: string + example: "QS.CloudQSS.CPQ.CORE" + parentVersion: + description: | + Referenced parent version. + The mask @ may be used to link with a specific revision. + If the @revision is not provided, the latest version's revision will be used. + type: string + example: "2022.2@4" + excluded: + description: All excluded refs will be ignored (but will still be visible for package version). + type: boolean + documents.json: + type: object + description: List of documents data. + properties: + documents: + type: array + items: + description: List of builded files. + type: object + required: + - fileId + - slug + - title + - format + - type + - operations + properties: + fileId: + type: string + description: File name with folders. + example: "docs/qitmf-v5.11.json" + fileName: + type: string + description: File name (slug+extension). + example: "qitmf-v5.11.json" + slug: + description: Published file slug. + type: string + pattern: "^[a-z0-9-]" + example: "qitmf-v5-11-json" + type: + $ref: "#/components/schemas/SpecificationType" + format: + $ref: "#/components/schemas/DocumentFormat" + title: + description: Document title and summary. + type: string + description: + description: Document description. + type: string + version: + description: Document version. For openapi document, version is taken from info object. + type: string + example: "1.0.1" + operationIds: + description: List of operation IDs. + type: array + items: + type: string + example: ["get-quoteManagement-v5-quote"] + metadata: + description: Meta parameters from config file. 
+ type: object + properties: + info: + description: Info object from openapi document + type: object + externalDocs: + description: External documentation object from openapi document + type: object + labels: + description: List of file labels. + type: array + items: + type: string + example: ["TMF"] + blobId: + description: Git blob ID of the file. + type: string + example: a5d84af7 + tags: + type: object + properties: + name: + type: string + description: + type: string + externalDocs: + type: object + properties: + description: + type: string + url: + type: string + comparisons.json: + type: object + properties: + comparisons: + type: array + items: + type: object + required: + - operationTypes + properties: + comparisonFileId: + type: string + description: Pointer to the file with changes. Optional, required only if package contains documents. Not required in case of refs only (dashboard). + example: "comparisonFile1.json" + packageId: + description: Package unique string identifier (full alias). + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + version: + description: Version name for publication. + type: string + example: "2022.3" + revision: + type: number + previousVersion: + description: | + Name of the previous published version. + type: string + example: "2022.2" + default: "" + previousVersionPackageId: + description: Previous release version package id. + type: string + example: "QS.CloudQSS.CPQ.Q-TMF" + previousVersionRevision: + type: number + operationTypes: + type: array + items: + type: object + properties: + apiType: + type: string + changesSummary: + $ref: "#/components/schemas/ChangeSummary" + numberOfImpactedOperations: + allOf: + - $ref: '#/components/schemas/ChangeSummary' + - type: object + description: Number of operations impacted by each severety type. 
+ apiAudienceTransitions: + type: array + description: Shows transitions of operations' apiAudience value (compared to the previous release version) and number of operations in which this transition occurred. The array contains only records of transitions that actually occurred in operations. + items: + type: object + properties: + currentAudience: + type: string + description: Current apiAudience value (currentAudience must not be equal to previousAudience) + enum: + - internal + - external + - unknown + previousAudience: + type: string + description: Previous apiAudience value + enum: + - internal + - external + - unknown + operationsCount: + type: number + description: Number of operations in which the apiAudience was changed from previousAudience to currentAudience + tags: + type: array + items: + type: string + example: + ["a", "b"] + fromCache: + description: True if the changes were retrieved from BE (not calculated) + type: boolean + comparisons/{comparisonFileId}: + type: object + description: List of changes data. + properties: + operations: + description: Collection of changes depend on apiType. + type: array + items: + type: object + required: + - operationId + - changes + - changesSummary + properties: + operationId: + description: >- + Operation unique identifier (slug). Not the same as + operationId tag from the OpenAPI file. + type: string + example: get-quoteManagement-v5-quote + dataHash: + description: Operation hash. + type: string + example: sdfsdfsf242 + previousDataHash: + description: Hash of the previous published version operation. + type: string + example: sdfsdfsf24dds2 + changeSummary: + $ref: "#/components/schemas/ChangeSummary" + changes: + description: Collection of changes for each operation. + type: array + items: + $ref: "#/components/schemas/SingleOperationChange" + operations.json: + description: List of operations for publish. 
+ type: object + properties: + operations: + type: array + items: + allOf: + - $ref: "#/components/schemas/Operation" + - type: object + required: + - data + - metadata + - searchScopes + properties: + models: + description: Map of operation models + type: object + additionalProperties: + type: string + example: + Template: a6b1596abe745a4a0cd3de25a411dbe671a3ba7c + ErrorResponse: cf1d87cf3895af106a3ad29f2098dcc2cc477e71 + CreateItemDto: 72bfdc14758213e5c09620b29054da86b75d4868 + deprecatedInfo: + description: | + Additional information about deprecated operation: + * for REST API it is value of 'x-deprecated-meta' extension (value of extension must be string), which is defined for deprecated operation. + * for GraphQL API it is value of 'reason' argument of @deprecated directive, which is defined for deprecated operation. + type: string + deprecatedInPreviousVersions: + description: List of previous release versions where operation was also deprecated + type: array + items: + type: string + example: ["2022.2", "2022,1", "2021.4"] + deprecatedItems: + $ref: "#/components/schemas/DeprecatedItems" + metadata: + description: Metadata content depend on apiType. + oneOf: + - $ref: "#/components/schemas/RestOperationMeta" + - $ref: "#/components/schemas/GraphQLOperationMeta" + - $ref: "#/components/schemas/ProtobufOperationMeta" + searchScopes: + description: Scopes for search indexes. + type: object + additionalProperties: + type: string + example: + all: "Quote Snapshot V6 Performs full text search ofQuotes Statistics By Phrase fullTextSearchStatistics searchPhrase offset int32 limit int32 fields 200 OK application/json quote anonymousCustomerData assignTo The name of the user/team to whom the Quote is assigned cancellationReason The entity that represents common information about a customer's request cancellationReasonDescription The reason (from the free text field) why the Quote has been cancelled contactMedium A list of contact mediums (ContactMedium [*]). 
Indicates the contact medium that could be used to contact the party deliveryMethod Id of the Delivery Method (from the Delivery Method list) for Quote description Description of the Quote extendedParameters additionalProperties The attribute contains additional parameters of the Quote The attribute contains additional parameters of the Quote Map> The attribute contains additional parameters of the Quote externalRefs Represents a reference to an external object id Unique identifier of the Quote name Quote name quoteDate Date and time when the Quote was created date-time quoteTotalPrice Represents Quote total price relatedParty A list of related party references (RelatedPartyRef [*]).A related party defines party or party role linked to a specific entity relatedQuote A list of related quotes (RelatedQuote [*]). It represents another revision of the in-flight Quote from the same chain, i.e. in-flight Quote with the same initialBaselineQuoteId value. This class is returned in the API response only in case of enhancedListQuotes API operation is called. It can be empty if there are no other in-flight Quotes in this chain revision Attribute is used to mark in-flight Quote. Increased in case changes are done to the Quote after submission int32 state State of the Quote updatedWhen Date and time when the Quote was updated date-time validFor Quote with reduced scope of attributes quoteItemStatistic Shows the number of Quote's root Quote Items grouped by some criteria(e.g. by Product Offering, Action) action Numbers of root Quote Items grouped by Action count Number of Quote Items for the specific value int32 name Action name Shows the number of Customer's Product Instances for specified criteria (e.g. Product Family, Billing Account and so on) productFamilyId Unique identifier of Product Family productFamilyName Product family name. Name is returned in the default locale. 
If alternative locale is passed in Accept-language header in the API request the system returns corresponding localized name in the response. productOfferingId Unique identifier of Product Offering productOfferingName Product offering name. Attribute is returned in the response for catalog entities only (product offering). Name is returned in the default locale. If alternative locale is passed in Accept-language header in the API request the system returns corresponding localized name in the response Shows the number of Quote's root Quote Items grouped by some criteria(e.g. by Product Offering, Action) Shows the Quote with a number of root Quote Items 4XX default response application/json;charset=UTF-8;model=errorResponse;version=1 5XX default response application/json;charset=UTF-8;model=errorResponse;version=1 " + SingleOperationChange: + allOf: + - type: object + description: Discrepancy data in a single operation. + properties: + description: + description: >- + Human-readable description of point of change. + type: string + example: "[Added] Property: summary." + severity: + $ref: "#/components/schemas/ChangeSeverity" + scope: + type: string + description: | + Part of operation (like request/response) where change was made. Scope differs for apiTypes.\ + Scope is needed to correctly identify severity of change, because the same change can have different severity in request/response. + action: + description: Action, what was done with the endpoint. 
+ type: string + enum: + - add + - remove + - replace + - rename + - oneOf: + - $ref: "#/components/schemas/ChangeAdd" + - $ref: "#/components/schemas/ChangeRemove" + - $ref: "#/components/schemas/ChangeReplace" + - $ref: "#/components/schemas/ChangeRename" + ChangeAdd: + type: object + description: Data of single operation change when change action = add + properties: + currentDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + currentValueHash: + type: string + description: | + Hash of the added entity.\ + Hash is needed to identify that the same enitity was changed in other operations, that allows calculating declarative number of changes in package version. + ChangeRemove: + type: object + description: Data of single operation change when change action = remove. + properties: + previousDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. 
+ type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + previousValueHash: + type: string + description: | + Hash of the removed entity.\ + Hash is needed to identify that the same enitity was changed in other operations, that allows calculating declarative number of changes in package version. + ChangeReplace: + type: object + description: Data of single operation change when change action = replace + properties: + currentDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + previousDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + previousValueHash: + type: string + description: | + Previous hash of the changed entity.\ + Hash is needed to identify that the same enitity was changed in other operations, that allows calculating declarative number of changes in package version. 
+ currentValueHash: + type: string + description: | + Current hash of the changed entity.\ + Hash is needed to identify that the same enitity was changed in other operations, that allows calculating declarative number of changes in package version. + ChangeRename: + type: object + description: Data of single operation change when change action = rename. + properties: + currentDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + previousDeclarationJsonPaths: + description: | + (below, a document is not considered to be an original document, but a document with one specific operation)\ + When comparing two documents (origin and changed; difference from changed document apended to the original document), a declarative jsonPath is calculated for each change. + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + example: [["components/schemas/Cat/minProperties", "components/schemas/Dog/minProperties"],["/quoteManagement/v5/quote"]] + previousKey: + type: string + description: Previous key (name) of the renamed entity. + currentKey: + type: string + description: Current key (name) of the renamed entity. + BuildConfig: + type: object + description: | + Configuration of the source files. + Content can not be empty, files or refs are **required**. + required: + - version + - status + properties: + version: + description: Version name for publication. 
+ type: string + example: "2022.3" + previousVersion: + description: | + Name of the previous published version. + The mask @ is required for 'changelog' buildType. + type: string + example: "2022.2" + default: "" + previousVersionPackageId: + description: | + Required in case of snapshot when publishing version should be compared to different package. + Do not set anything if the package sould be compared to itself. Value equals to packageId is forbidden. + type: string + example: "QS.CloudOSS.PL.MC" + status: + $ref: "#/components/schemas/VersionStatusEnum" + groupName: + description: | + Operation group name. groupName is required if buildType = documentGroup. + type: string + example: v1 + apiType: + $ref: "#/components/schemas/ApiType" + buildType: + description: | + Type of the build process. Available options are: + + **build** - Standard build process to publish new version. + Consist of contract and operations build and validation, calculation of the changelog, creation of the final version of the published contracts. + + **changelog** - Only the changelog calculation, no API contracts version will be created. + The ```files``` and ```refs``` objects are not required in this case. + + **documentGroup** - Process to transform documents so that they will contain operations only from specific operations group. + type: string + enum: + - build + - changelog + - documentGroup + default: build + metadata: + description: Common publish metadata. + type: object + properties: + commitId: + description: Last Git commit ID of the version. + type: string + example: a5d45af7 + repositoryUrl: + description: Url of the Git repository. + type: string + format: URI + example: "https:///apihub-registry" + versionLabels: + description: List of version labels. Label is a string. + type: array + items: + type: string + example: ["part-of:CloudQSS-CPQBE"] + cloudName: + description: Name of the cloud for publication from Agent. 
+ type: string + example: "k8s-apps2" + cloudUrl: + description: Full address of the cloud from Agent. + type: string + format: URI + example: "https://k8s-apps2.k8s.sdntest.qubership.org" + namespace: + description: Namespace of Agent's publication. + type: string + example: "cloudQSS-release2" + externalMetadata: + description: External build metadata + type: object + properties: + operations: + type: array + items: + type: object + properties: + apiType: + type: string + enum: + - rest + method: + type: string + description: HTTP method + path: + type: string + description: HTTP path + externalMetadata: + description: External operation metadata + type: object + files: + description: | + Detailed data about files in sources archive. + Required if no Refs are provided. + type: array + items: + type: object + required: + - fileId + properties: + fileId: + type: string + description: File name. + example: "qitmf-v5.11.json" + publish: + description: | + Flag, publish the source file or not. + Required for case with external refs when specification is separated into multiple files. + After dereference in scope of build process the source files(parts) are no longer required. So it's possible to skip publish of such files. + type: boolean + default: true + labels: + description: List of file labels. Label is a string. + type: array + items: + type: string + example: ["TMF"] + blobId: + description: Git blob ID of the file. + type: string + example: a5d45af7 + xApiKind: + description: Custom x-api-kind parameter for the file. Specify if this API is backward compatible. + type: string + example: "no-BWC" + refs: + description: | + Detailed data about referenced versions for current package version. + Required if no Files provided. + type: array + items: + type: object + required: + - refId + - version + - type + properties: + refId: + description: Referenced package Id. I.e. link to another package. 
+ type: string + example: "QS.CloudQSS.CPQ.CORE" + version: + description: Referenced package version. I.e. link to another package's version. + The mask @ may be used to link with a specific revision. + If the @revision is not provided, the latest version's revision will be used. + type: string + example: "2022.3@5" + parentRefId: + description: Required to build reference(dependencies) graph. Allows to specify the parent node package id in the graph. + type: string + example: "QS.CloudQSS.CPQ.CORE" + parentVersion: + description: | + Required to build reference(dependencies) graph. Allows to specify the parent node version in the graph. + The mask @ may be used to link with a specific revision. + If the @revision is not provided, the latest version's revision will be used. + type: string + example: "2022.2@4" + excluded: + description: | + Required for conflict resolution case when different versions of the same package appear in the publication config. + All excluded refs will be ignored (but will still be visible for package version). + type: boolean + ChangeSummary: + description: | + Numbers of changes between the current and previous published version. + type: object + properties: + breaking: + description: Number of changes, breaking the backward compatibility. + type: integer + default: 0 + semi-breaking: + description: Number of changes, breaking the backward compatibility in a legal way. For example, deleting correctly deprecated endpoint. + type: integer + default: 0 + deprecated: + description: Number of deprecated endpoints. + type: integer + default: 0 + non-breaking: + description: Number of non-breaking changes. + type: integer + default: 0 + annotation: + description: Number of annotation changes. + type: integer + default: 0 + unclassified: + description: Number of unclassified changes. + type: integer + default: 0 + ChangeSummaryByApiType: + description: | + Numbers of changes between the current and previous published version by api type. 
+ type: object + additionalProperties: + type: object + properties: + breaking: + description: Number changes, breaking the backward compatibility. + type: integer + default: 0 + semi-breaking: + description: Number of changes, breaking the backward compatibility in a legal way. + type: integer + default: 0 + deprecated: + description: Number of deprecated endpoints. + type: integer + default: 0 + non-breaking: + description: Number of non-breaking changes. + type: integer + default: 0 + annotation: + description: Number of annotation changes. + type: integer + default: 0 + unclassified: + description: Number of unclassified changes. + type: integer + default: 0 + example: + rest: + breaking: 15 + semi-breaking: 3 + deprecated: 2 + non-breaking: 45 + annotation: 233 + unclassified: 0 + graphql: + breaking: 0 + semi-breaking: 0 + deprecated: 0 + non-breaking: 5 + annotation: 20 + unclassified: 0 + OperationSummary: + description: Numbers of operations of each type. + type: object + properties: + endpoints: + description: Overall number of operations. + type: integer + default: 0 + deprecated: + description: Number of newly deprecated operations (comparing with the previous version). + type: integer + default: 0 + created: + description: Number of newly created operations (comparing with the previous version). + type: integer + default: 0 + deleted: + description: Number of deleted operations (comparing with the previous version). + type: integer + default: 0 + CreateOperationGroup: + description: Version group. + type: object + required: + - groupName + - apiType + - isPrefixGroup + properties: + groupName: + type: string + description: Unique group name. + example: New_operation_group + apiType: + $ref: "#/components/schemas/ApiType" + description: + type: string + description: Description of group. + isPrefixGroup: + type: boolean + description: true - if the group created automatically via restGroupingPrefix. 
+ example: false + exportTemplateFileName: + type: string + description: The name of the export template file, if there is one. + example: template123.json + BuildTypes: + type: string + description: | + Type of the build process. Available options are:\ + **build** - Standard build process to publish new version. Consist of contract and operations build and validation, calculation of the changelog, creation of the final version of the published contracts.\ + **changelog** - Only the changelog calculation, no API contracts version will be created. The ```files``` and ```refs``` objects are not required in this case.\ + **documentGroup** - Process to transform documents so that they will contain operations only from specific operations group. + **reducedSourceSpecifications** - Process to take operations from the operation group and ex + **mergedSpecification** + enum: + - build + - changelog + - reducedSourceSpecifications + - mergedSpecification + BuildErrors: + type: object + properties: + errors: + type: string + description: Build error message. + ChangeSeverity: + description: Severity of the particular change. + type: string + enum: + - breaking + - semi-breaking + - deprecated + - non-breaking + - annotation + - unclassified + PackageVersionRevision: + description: Version revision parameters. + type: object + title: PackageVersionRevision + required: + - version + - revision + - status + - createdAt + - createdBy + properties: + version: + description: Package version name. The @ mask is used to return the revision number. + type: string + example: "2023.1@5" + revision: + description: Number of the revision. + type: integer + format: int32 + example: 3 + status: + $ref: "#/components/schemas/VersionStatusEnum" + createdBy: + $ref: "#/components/schemas/Principal" + createdAt: + type: string + description: Date of revision creation. 
+ format: datetime + notLatestRevision: + type: boolean + default: false + revisionLabels: + description: List of revision labels. + type: array + items: + type: string + example: [ "part-of:CloudQSS-CPQBE" ] + publishMeta: + additionalProperties: true + description: Publish metadata. + type: object + properties: + commitId: + description: Last Git commit ID of the version. + type: string + example: a5d45af7 + repositoryUrl: + description: Url of the Git repository. + type: string + format: URI + example: https:///apihub-registry + cloudName: + description: Name of the cloud for publication from Agent. + type: string + example: k8s-apps2 + cloudUrl: + description: Full address of the cloud from Agent. + type: string + format: URI + example: https://k8s-apps2.k8s.sdntest.qubership.org + namespace: + description: Namespace of Agent's publication. + type: string + example: cloudQSS-release2 + examples: + Package: + description: Example of the package params + value: + packageId: "QS.QSS.PRG.APIHUB" + parentId: "QS.QSS.PRG" + kind: "package" + name: "Test package" + alias: "APIHUB" + description: "Package for the test purpose" + isFavorite: false + serviceName: "apihub-be" + defaultRole: Viewer + PackageCreate: + description: Example of the package creation params + value: + parentId: "QS.QSS.PRG" + kind: "package" + name: "Test package" + alias: "APIHUB" + description: "Package for the test purpose" + serviceName: "apihub-be" + PackageNotFound: + description: Package not found by ID. Response for the 404 error + value: + status: 404 + code: "APIHUB-3020" + message: "package with packageId = $packageId not found" + VersionNotFound: + description: Version not found by number. Response for the 404 error + value: + status: 404 + code: "APIHUB-3050" + message: "Published version $version not found" + FileNotFound: + description: File not found by slug. 
Response for the 404 error + value: + status: 404 + code: "APIHUB-3043" + message: "File for path $fileId not found" + IncorrectInputParameters: + description: Incorrect input parameters + value: + status: 400 + code: "APIHUB-COMMON-4001" + message: "Incorrect input parameters" + InternalServerError: + description: Default internal server error + value: + status: 500 + code: "APIHUB-8000" + reason: "InternalServerError" + message: "InternalServerError" + securitySchemes: + BearerAuth: + type: http + description: Bearer token authentication. Default security scheme for API usage. + scheme: bearer + bearerFormat: JWT + api-key: + type: apiKey + description: Api-key authentication. + name: api-key + in: header + BasicAuth: + type: http + description: Login/password authentication. + scheme: basic diff --git a/docs/api/apihub-ws.yaml b/docs/api/apihub-ws.yaml new file mode 100644 index 0000000..0f37ec1 --- /dev/null +++ b/docs/api/apihub-ws.yaml @@ -0,0 +1,289 @@ +asyncapi: 2.4.0 +info: + title: APIHUB WS specification + version: 0.1.0 + description: APIHUB websocket API specification. Is used for the Editor UI connection. 
+ license: + name: NC + url: https://www.qubership.org +externalDocs: + url: https://qubership.org/APIHUB +servers: + prod: + url: wss://apihub.qubership.org/ws/v1 + protocol: wss + description: Main server + security: + - token: [] +defaultContentType: application/json +channels: + /projects/{projectId}/branches/{branchId}: + parameters: + projectId: + description: Project id to connect + schema: + type: string + branchId: + description: Branch identifier + schema: + type: string + bindings: + ws: + bindingVersion: 0.1.0 + method: GET + query: + type: object + description: Query parameters for WS connection + properties: + token: + type: string + description: Security token + subscribe: + operationId: BranchEditingSendMessage + description: Messages sent by the server for branch editing + message: + oneOf: + - $ref: "#/components/messages/OnConnected" + - $ref: "#/components/messages/OnDisconnected" + - $ref: "#/components/messages/OnFilesUpdated" + - $ref: "#/components/messages/OnRefsUpdated" + - $ref: "#/components/messages/OnBranchSaved" + - $ref: "#/components/messages/OnVersionPublished" + /projects/{projectId}/branches/{branchId}/files/{fileId}: + parameters: + projectId: + description: Project id to connect + schema: + type: string + branchId: + description: Branch identifier + schema: + type: string + fileId: + description: File identifier + schema: + type: string + bindings: + ws: + bindingVersion: 0.1.0 + method: GET + query: + type: object + description: Query parameters for WS connection + properties: + token: + type: string + description: Security token + subscribe: + operationId: FileEditingSendMessages + description: Messages sent by the server for file editing +components: + messages: + OnConnected: + messageId: OnConnected + name: OnConnected message + payload: + $ref: "#/components/schemas/onConnectedPayload" + OnDisconnected: + messageId: OnDisconnected + name: OnDisconnected message + payload: + $ref: 
"#/components/schemas/OnDisconnectedPayload" + OnFilesUpdated: + messageId: OnFilesUpdated + name: OnFilesUpdated message. + payload: + $ref: "#/components/schemas/OnFilesUpdatedPayload" + OnRefsUpdated: + messageId: OnRefsUpdated + name: OnRefsUpdated message. + payload: + $ref: "#/components/schemas/OnRefsUpdatedPayload" + OnBranchSaved: + messageId: OnBranchSaved + name: OnBranchSaved message + payload: + $ref: "#/components/schemas/OnBranchSavedPayload" + OnVersionPublished: + messageId: OnVersionPublished + name: OnVersionPublished message + payload: + $ref: "#/components/schemas/OnVersionPublishedPayload" + schemas: + onConnectedPayload: + type: object + properties: + type: + type: string + description: Type of the connection + sessionId: + type: string + connectedAt: + type: string + user: + $ref: "#/components/schemas/User" + OnDisconnectedPayload: + type: object + properties: + type: + type: string + sessionId: + type: string + userId: + type: string + User: + type: object + description: Base user info + properties: + id: + type: string + name: + type: string + avatarUrl: + type: string + OnUpdatedPayload_base: + description: Base params for files and refs update. + type: object + required: + - action + properties: + type: + type: string + userId: + type: string + action: + type: string + enum: + - add + - remove + - modify + - replace + OnFilesUpdatedPayload: + description: Polymorphic message for files update + type: object + oneOf: + - $ref: "#/components/schemas/OnFilesUpdatedPayload_add" + - $ref: "#/components/schemas/OnFilesUpdatedPayload_remove" + - $ref: "#/components/schemas/OnFilesUpdatedPayload_modify" + - $ref: "#/components/schemas/OnFilesUpdatedPayload_replace" + discriminator: action + OnFilesUpdatedPayload_add: + description: OnFilesUpdated message. 
Action:add + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + data: + $ref: "#/components/schemas/FileData" + OnFilesUpdatedPayload_remove: + description: OnFilesUpdated message. Action:remove + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + fileId: + type: string + OnFilesUpdatedPayload_modify: + description: OnFilesUpdated message. Action:modify + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + fileId: + type: string + OnFilesUpdatedPayload_replace: + description: OnFilesUpdated message. Action:replace + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + fileId: + type: string + data: + $ref: "#/components/schemas/FileData" + FileData: + type: object + description: File parameters for add/update. + properties: + fileId: + type: string + OnRefsUpdatedPayload: + description: Polymorphic message for refs update + type: object + oneOf: + - $ref: "#/components/schemas/OnRefsUpdatedPayload_add" + - $ref: "#/components/schemas/OnRefsUpdatedPayload_remove" + - $ref: "#/components/schemas/OnRefsUpdatedPayload_replace" + discriminator: action + OnRefsUpdatedPayload_add: + description: OnRefsUpdated message. Action:add + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + data: + $ref: "#/components/schemas/RefData_add" + OnRefsUpdatedPayload_remove: + description: OnRefsUpdated message. Action:remove + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + refId: + type: string + version: + type: string + OnRefsUpdatedPayload_replace: + description: OnRefsUpdated message. Action:replace + allOf: + - $ref: "#/components/schemas/OnUpdatedPayload_base" + - properties: + refId: + type: string + version: + type: string + data: + $ref: "#/components/schemas/RefData_replace" + RefData_add: + type: object + description: Data for ref updates. 
Action:add + properties: + refId: + type: string + version: + type: string + name: + type: string + status: + type: string + RefData_replace: + type: object + description: Data for ref updates. Action:replace + properties: + version: + type: string + status: + type: string + OnBranchSavedPayload: + description: All connected users should get "branch:saved" message on branch save. + type: object + properties: + type: + type: string + userId: + type: string + comment: + type: string + OnVersionPublishedPayload: + description: All connected users should get "branch:saved" message on branch publish. + type: object + properties: + type: + type: string + userId: + type: string + version: + type: string + status: + type: string + securitySchemes: + token: + type: httpApiKey + name: token + description: Provide the authentication token + in: query diff --git a/docs/build.md b/docs/build.md new file mode 100644 index 0000000..1bdf607 --- /dev/null +++ b/docs/build.md @@ -0,0 +1,113 @@ +# Build(task) model +Apihub implements a pull model for build processing. +I.e. BE has a queue of builds and has an interface to take the build. +In this schema BE is not aware of builders. +It allows to scale builders(browser, node services) horizontally without changes in BE configuration. + +# build config +Build config is a metadata set for an object that will be created during the build. 
+ +Example: +``` +{ + "files": [ + { + "slug": "", + "index": 0, + "fileId": "swagger.json", + "labels": [ + ], + "publish": true + } + ], + "status": "draft", + "version": "2024.3", + "metadata": { + }, + "buildType": "build", + "createdBy": "username", + "packageId": "username.g1.p1", + "publishId": "", + "publishedAt": "0001-01-01T00:00:00Z", + "previousVersion": "2024.1", + "previousVersionPackageId": "" +} +``` + +"publish" for file defines is the file should present in result(builder output) + +"publishId" is generated by Apihub BE when the build is accepted and stored to DB + +"buildType" defines the logic of build process +options: +* "build" - regular version publish +* "changelog" - changelog only +* "documentGroup" - export +* "reducedSourceSpecifications" - export +* "mergedSpecification" - export + + +# Stages for buildType = build (in builder lib) +0) detect file type (swagger 2.0, openapi 3.0+, graphql, protobuf) +0.1) convert swagger 2.0 to openapi 3.0 +1) bundling (resolve ext refs) +2) make operations from specs and make search scopes (set of words in a file excluding specification terms(e.x. description, summery, parameters, etc)) +3) make comparison if previous version is defined + +# result + +![build result example](build_result_example.png) + +* "info.json" - main file which contains build(future version) parameters and metadata. 
+* documents.json - list of documents with metadata +* operations.json - list of operatrions with metadata +* comparisons.json - list of comparisons with metadata +* notifications.json - warnings from builder + +# build sequences +## Success cases +### build type = build & client_build=false: +publish request -> validation -> save build entity and sources(tables build and build_src ) -> build is in queue (status==none) -> +build is taken by node service *and bound to builder id* (status=running) -> the build is processed on node service (node service sends keepalives(set build status=running) while the build is running) -> node service sends build status = success with build result data -> builder id is validated -> build result is stored in DB (table build_result) or in minio -> build result is validated -> build result is stored in DB via single transaction -> search indexes calculated asynchronously(somehow) + +### build type = build & client_build=true: +publish request (with client_build=true and builderId!=null) -> validation -> save build entity and sources(tables build and build_src ) -> the build is bound to the builderId from request and status = running -> buildId is returned to the client -> the build is processed on client (client sends keepalives(set build status=running) while the build is running)-> client sends build status = success with build result data -> builder id is validated -> build result is stored in DB (table build_result) or in minio -> build result is validated -> build result is stored in DB via single transaction -> search indexes calculated asynchronously(somehow) + +### build type = changelog +almost the same, but only comparisons generated and stored + +### build type = documentGroup \ reducedSourceSpecifications \ mergedSpecification +TODO + + +## Error cases: +### builder is restarted (state is lost) +For example due to OOM killed or browser(tab) is closed. 
+ +start -> processing -> keepalives stop going -> BE detects that the build is no longer processed by the following criteria: "now - last_active > {timeout}" -> +the build is considered as free and goes to the queue again -> when node service is ready to take the build, restart_count is checked. If restart_count >= 2 then the build is failed with message "Restart count exceeded limit" -> if restart is available, build is taken by node service *and bound to builder id* (status=running) **and restart_count is incremented** -> build as usual + +### builder internal error (handled) +Internal error(exception) happens in builder logic and it's caught during the processing. + +Builder sets status=error and error details. +New version/changelog/export is not created. +Error build is not restarted. + +### problem with source documents +The error in source documents is detected by builder. +Builder sets status=error and details regarding the problem. +New version/changelog/export is not created. +Error build is not restarted. + +# build statuses +none - build is not started +running - build is started +complete - successfully completed +error - completed with error + +# DB +Related tables: +* build - list of builds +* build_src - config and sources archive(zip) +* build_result or minio (depends on configuration) - result archive(zip) diff --git a/docs/development_guide.md b/docs/development_guide.md new file mode 100644 index 0000000..2a7a237 --- /dev/null +++ b/docs/development_guide.md @@ -0,0 +1,145 @@ +This is a draft version of the backend development guide, to be updated. + +# API +## Design +Backend API development follows API-first approach. +It means that we need to design and approve API before the implementation. +REST API is an openapi v3.0 document(see /doc/api folder). + +## Changes +We have an agreement that we should not introduce any breaking changes to public API.
+There's one small exception: if we know that the the only user is frontend and we can submit changes simultaneously with FE team. + +In case of breaking changes we need to create new version of endpoint + +e.x. "/api/v2/packages/{packageId}/versions/{version}/references" -> "/api/v3/packages/{packageId}/versions/{version}/references" + +Then, old endpoint should be marked as deprecated in specification and code. +See "Code Deprecation policy" chapter for details + +Non-breaking(additive) changes could be made without any synchronization with frontend. + +## API review +Api changes should be made is a separate branch. + +API should be published to Apihub for review and linked to the BA, BE and FE ticket's description. + +Since we treat API as a code we should apply the same review procedure. I.e. create merge request(which contains ticket name) and ask(BA, BE and FE teams) for review. MR with api changes should contain gitlab label 'API'. + +Need to make sure that the API was reviewed(confirm with BE, FE teams) and all comments resolved, then add label api_approved to the ticket. + +BA ticket branch should be merged to BE ticket branch before merging to develop. + +# Code +## Projects structure +Most the feature code is separated into structural folders - controllers, services, repositories, views, entities. +But some outstanding/common functionality like DB migration is placed in functional folders. + +TODO: need an example how the code is distributed for some business entity. + +## Parameters in function: +Required parameters should be passed as as separate params, optional params could be passed via struct. + +e.x. 
`getOperation(packageId string, version string, operationId string, searchReq view.SearchRequest)` + +`packageId` - required +`version` - required +`operationId` - required +`searchReq` is a struct with optional params like paging, etc + +## Constants naming convention +TODO + +## Deprecation policy +deprecated methods/functions should be appended with '\_deprecated' postfix. +Use case: +New version of endpoint is required to avoid API breaking changes(e.x. payload fields was updated). +Need to rename existing controller/service/view to \*\_deprecated and add new one with proper name. + +## Logging +The code should contains logs for troubleshooting. +INFO System log shouldn't be flooded with repeated/useless data, but should contain major operations and event. +So the INFO log should reflect what is going on, but without much details. +We don't need INFO log for each read(GET) request, but all write(POST, PUT) operations should be logged. + +### Errors +All errors should be logged to ERROR log. + +### Async operations +Async operation start should be logged with info log containing request data and generated id. +Example: +* log.Infof("Starting Quality Gate validation v3 with params %+v, id = %s", req, id) + +All async operation logs should include operation id as a prefix. +Example: +* log.Errorf("Quality Gate %s: Failed to search package %s versions by textFilter: %s", report.Id, packageId, err.Error()) + +It's recommended to log all major steps in INFO and some minor with DEBUG. + +Async operation end should be logged with INFO log. +Example: +* log.Infof("Operations migration process %s complete", id) + +# Pull requests +## Title +PR title should contain issue id, otherwise it couldn't be merged by rules. +The title should be in the following format: `ISSUE-ID Changes short summary` + + +## Merge options +It's recommended to delete dev branch after merge and squash commits to get clean develop/main branches history. 
+ +So both checkboxes should be checked: +* Delete source branch when merge request is accepted. +* **Squash commits when merge request is accepted.** + +## Code review +Everyone is welcome to the code reviews. + +### Reviewer's checklist: +* Changes cover all requirements of the story. +* Changes are syntactically and logically correct. +* Changes in code match changes in API if applicable. +* No commented code with some rare and justified exceptions. +* Error handling should be implemented. +* New code should contain necessary logs(see Logging chapter). + +# Demo +At this moment we run demo every week. Every developer should prepare ready and in-progress(if it's suitable and makes sense) stories for demonstration. +The goal of demonstration is to collect the whole team's feedback. +## Introduction +Demo listeners need to understand the context of presented story. +What is the business goal(epic) and how it's transformed into implemented changes. +Or answer the question: "why the change is required, which purpose it serves?". +It's going to be great if you'll describe the background of the problem. You may ask dev lead or BA team for details if required. +## Demonstration +Live process demonstration is preferred. But if it's not possible, you can show the final result or a set of screenshots/presentation. +## Questions +Try to answer all questions. + +# Business logic +## Business terms glossary + +specification - file which contains openapi/graphql/etc content + +build - processing specification which includes the following stages: dereference, comparison with previous version (backward compatibility analysis), generation of search index. Performed via builder library on frontend or node service. + +project - a part of editor service implementation, local representation of a remote git repository(integration).
+ +draft - a part of editor service implementation, local copy of a remote git repository that holds user changes until they're committed and pushed to git or discarded. Created automatically when user opens editor and connects the some git branch. + +workspace - is a first-level group. Provide a logical separation for different projects, teams, or departments within the organization. Grouping of related APIs and provide a hierarchical structure for better organization and management. + +group - is an entity that allows you to logically group packages in a hierarchical view. Within a workspace, groups help further categorize APIs based on functional domains or specific areas of focus. Groups provide a flexible way to organize APIs and make them easily discoverable within the API Management portal. + +package - is an entity that contains published API documents related to specific Service/Application. Packages are versioned, and package version can be in one of the following statuses: Draft, Release and Archived. + +dashboard - is a virtual package which can contain only links to another packages/dashboards. Dashboard cannot contain its own documents. Dashboards serve as a higher level of abstraction and can group APIs from complex applications that consist of multiple services. + +reference - a link between published versions. Logically connects one version to another. + +baseline package - a package that contains release versions and compared to snapshot. Used in Agent. + +BWC - BackWard Compatibility / BackWard Compatible, an API that is supposed to be backward compatible, i.e. contain no breaking changes between releases. + +TODO: append diff --git a/docs/editor.md b/docs/editor.md new file mode 100644 index 0000000..27461b5 --- /dev/null +++ b/docs/editor.md @@ -0,0 +1,125 @@ +# Apihub editor + +## Overview +User can view git branch content, edit it, then commit and push changes. +Local state is stored on backend and called `draft`. 
+Apihub editor is based on Monaco editor and custom UI. + +**Right now editor doesn't support pull operation as well as merge of any type.** + +## Collaborative editing +Apihub editor is designed to be collaborative, i.e. users can work together on local copy of a git branch. +Executed changes will be visible to all users connected to the same branch and file. + +### Operational transformation +Collaborative text editing is based on https://en.wikipedia.org/wiki/Operational_transformation + +# project +Editor UI has the same grouping as a portal one. +Groups are the same as in portal! I.e. it's the same entity stored in `package_group`. +But empty groups(without **projects**) are hidden from the tree view. + +Project is a relation between Apihub and Git integration. +Project is bound to exactly one specific git integration server and repository inside it. + +## project vs package +Package is a portal's entity. +Project is an editor's entity. +Package contains versions, while the project is not. + +# Project config +## Files in project's branch +Apihub editor is not intended to work with all files in a repository, but with API specs only. +So user needs to add required files to the editor's config. **The config is generated and committed by Apihub automatically and usually requires no manual editing.** + +## Git representation +Project configs are stored in git in the top level folder `apihub-config`(name is a contract). +Each config is stored in a separate json file which is named as a project id. + +Example: `apihub-config/QS.CP.AH.RS.json` +https:///apihub-backend/-/blob/develop/apihub-config/QS.CP.AH.RS.json + +Project config contains a list of included documents and folders. + +`publish` flag indicates if the file should be included to result published version as a separate (specification) file.
Typically it's set to `false` **by user** when a document contains json schema which is required to resolve external reference, but not a complete specification. + +Config example: +``` +{ + "projectId": "QS.CP.AH.RS", + "files": [ + { + "fileId": "docs/api/Public Registry API.yaml", + "publish": true + }, + { + "fileId": "docs/api/APIHUB API.yaml", + "publish": true + }, + { + "fileId": "docs/api/Admin API.yaml", + "publish": true + } + ] +} +``` + +# Branch draft +Local state for a git repo branch(only project files!) is stored in the BE DB and called "draft". +Draft is created when a user opens any branch in Apihub editor. +All required data(file content) is loaded to the draft on creation. + +If user made no changes and closed the session(websocket), the draft would be deleted. +Draft with changes would be stored for a long time. + +tables: +* drafted_branches - list of drafts +* branch_draft_content - list of content for each draft with data +* branch_draft_reference - references + +# Communication between FE and BE +FE loads some information via REST, but all interactive actions are performed via websocket. + +# Websocket + +## Branch websocket +Initial branch state is sent to the client in the first message from BE. +All events that modify branch config are sent via the websocket. + +Events: +``` + BranchConfigSnapshotType = "branch:config:snapshot" + BranchConfigUpdatedType = "branch:config:updated" + BranchFilesUpdatedType = "branch:files:updated" + BranchFilesResetType = "branch:files:reset" + BranchFilesDataModifiedType = "branch:files:data:modified" + BranchRefsUpdatedType = "branch:refs:updated" + BranchSavedType = "branch:saved" + BranchResetType = "branch:reset" + BranchEditorAddedType = "branch:editors:added" + BranchEditorRemovedType = "branch:editors:removed" +``` + + +## File websocket + +Content change events(like typing) are sent via file websocket.
+ +## Scaling +Since BE supports scaling, websocket sessions need to be bound to a specific BE pod to work with local state. +WsLoadBalancer implements this functionality + +# E2E sequence +## Initial load +open project -> start editor loading for default branch -> get branch from git and check the permissions -> if user can push to the branch, the UI will allow docs editing -> +Browser connects to branch websocket -> BE sends branch snapshot message with config(list of files and metadata) -> broser renders data + +Branch snapshot example: +``` +{"type":"branch:config:snapshot","data":{"projectId":"QS.CP.AH.AHA","editors":[],"configFileId":"apihub-config/QS.CP.AH.AHA.json","changeType":"none","permissions":["all"],"files":[{"fileId":"docs/Agent API.yaml","name":"Agent API.yaml","type":"unknown","path":"docs","publish":true,"status":"unmodified","blobId":"bd7fe81975235979dc2294e4a00e71995b1d72c2","changeType":"none"}],"refs":[]}} +``` + +## Open file/edit +open file -> Browser connects to file websocket -> BE sends current file state -> BE sends "user joined" event to other users -> user types something/move cursor -> FE send changes event -> changes are applied and BE state is updated -> updates are sent to other clients -> updates are applied to other client's state and re-rendering happens + +File state is periodically save to DB (to branch_draft_content table). \ No newline at end of file diff --git a/docs/git_integration.md b/docs/git_integration.md new file mode 100644 index 0000000..4d552b7 --- /dev/null +++ b/docs/git_integration.md @@ -0,0 +1,74 @@ +# (Git) Integration +"Integration" is an ability to interact with git server. +Every integration have a constant in view.GitIntegrationType and it's considered as a part of integration key. +Some time ago we expected that multiple git servers will be supported, but in the end we supported only gitlab. + +NOTE: Apihub editor can not work without git integration enabled. 
So user is required to establish the integration, see the details below. + +# Gitlab authorization + +## Connection between Gitlab and Apihub +As part of deployment need to establish connection between Gitlab instance and Apihub instance. + +Apihub itself has no access to Gitlab. +I.e. Apihub works only with user's credential. + +NOTE: in current Gitlab implementation token TTL is short, i.e. hours. So we have to refresh the token every time it's close to expiration or expired. +Refresh token is used in this case. +So ideally the token should be refreshed automatically every time and invisible to end user. + +## How git integration is enabled +Git integration status is stored in **Apihub's** JWT token. (See Auth.go) +In `gitIntegration` field. +The value is used by Frontend to identify if user has integration enabled. + +If the integration is not enabled yet, Frontend displays welcome message with green button asking user to establish the integration(connect to gitlab). +When user clicks the button, FE redirects him to the Gitlab page (through /login/gitlab on Apihub BE) which allows to grant rights to Apihub application(You can check existing applications in your Gitlab profile: https://git.domain.com/-/profile/applications). +If user grants the rights, Apihub application is added to his profile and Gitlab redirects back to Apihub(to /login/gitlab/callback) with new generated token info. +Then user is redirected back to the original page(typically https://{APIHUB_URL}/editor/) with git integration enabled. +In scope of this process, the row with credentials is inserted to `user_integration` table. + +## Token revocation +User can revoke application token manually. So Apihub should handle it and mark integration as `revoked`. +In this case user's row in `user_integration` is updated and `is_revoked` column is set to true. Also need to disable existing cached Gitlab client(see below).
+ +# Gitlab go client +Apihub is using `go-gitlab` library to interact with gitlab. +The library requires initialization and user token is used in the init. I.e. go-gitlab instance is bound to specific user and we can't share it between users. +To reduce initializations count we've implemented cache in a provider style: `service/GitClientProvider` which is able to get client from cache or create it. +So the cache is initialized on demand. Different Apihub instances may have different sets of cached clients and it's ok due to on-demand initialization. + +## Cleaning client cache (distributed) +If token revoked or expired, the client instance(with built-in token) is no longer functional. So need to remove it from the cache of **ALL** Apihub instances. +For this purpose we use `DTopic`(distributed topic) from `olric`. `GitClientProvider` is listening to the "git-client-revoked-users" topic and cleaning up the cache when receives corresponding event. + +## TokenExpirationHandler +Tries to get new access token with refresh token. + +## TokenRevocationHandler +Marks existing key as revoked and disables integration for user(need to be re-established). + +# Db schema +* user_integration - credentials to git integrations(at this moment gitlab only) + +# Problems and troubleshooting +* expired token +Handled by TokenExpirationHandler + +* revoked token +Handled by TokenRevocationHandler + +* gitlab integration broken + +* 401 from Gitlab +In this case Apihub marks the token as revoked + + +# Local development +Since local instance is not authorized on Gitlab (and it's not possible to do), default connect algorithm will not work(Gitlab will reject the request from local PC). +But you can deal with the process and establish connection manually: +* Generate personal access token on gitlab https://git.domain.com/-/profile/personal_access_tokens with `api` scope. +* Insert row to `user_integration` with the token value in `key` column and null `expires_at`.
+ Or paste the value to `user_integration` in `key` column and remove `expires_at` if the record exists. + + diff --git a/docs/local_development/docker-compose/DB/docker-compose.yml b/docs/local_development/docker-compose/DB/docker-compose.yml new file mode 100644 index 0000000..833f096 --- /dev/null +++ b/docs/local_development/docker-compose/DB/docker-compose.yml @@ -0,0 +1,54 @@ +version: "3.9" +services: + postgres: + image: postgres:13.3 + command: + - "postgres" + - "-c" + - "max_connections=50" + - "-c" + - "shared_buffers=1GB" + - "-c" + - "effective_cache_size=4GB" + - "-c" + - "work_mem=16MB" + - "-c" + - "maintenance_work_mem=512MB" + - "-c" + - "random_page_cost=1.1" + - "-c" + - "temp_file_limit=10GB" + - "-c" + - "log_min_duration_statement=200ms" + - "-c" + - "idle_in_transaction_session_timeout=10s" + - "-c" + - "lock_timeout=1s" + - "-c" + - "shared_preload_libraries=pg_stat_statements" + - "-c" + - "pg_stat_statements.max=10000" + - "-c" + - "pg_stat_statements.track=all" + environment: + POSTGRES_DB: "apihub" + POSTGRES_USER: "apihub" + POSTGRES_PASSWORD: "APIhub1234" + PGDATA: "/C/pg-docker/data" + volumes: + - ../2. 
Init Database:/docker-entrypoint-initdb.d + - .:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U apihub -d apihub"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + restart: unless-stopped + deploy: + resources: + limits: + cpus: '1' + memory: 4G \ No newline at end of file diff --git a/docs/local_development/docker-compose/FE/docker-compose.yml b/docs/local_development/docker-compose/FE/docker-compose.yml new file mode 100644 index 0000000..da367f5 --- /dev/null +++ b/docs/local_development/docker-compose/FE/docker-compose.yml @@ -0,0 +1,10 @@ +version: "3.9" +services: + apihub-ui: + image: ghcr.io/qubersip/apihub-ui-assembly-release-2.2.18:build8 + environment: + APIHUB_BACKEND_ADDRESS: "host.docker.internal:8090" + APIHUB_NC_SERVICE_ADDRESS: "host.docker.internal:8091" + ports: + - "8080:8080" + restart: unless-stopped \ No newline at end of file diff --git a/docs/local_development/docker-compose/Node-service/docker-compose.yml b/docs/local_development/docker-compose/Node-service/docker-compose.yml new file mode 100644 index 0000000..35c1dde --- /dev/null +++ b/docs/local_development/docker-compose/Node-service/docker-compose.yml @@ -0,0 +1,10 @@ +version: "3.9" +services: + apihub-node-service: + image: ghcr.io/qubership/apihub-node-service-assembly-develop:build861 + environment: + APIHUB_BACKEND_ADDRESS: "host.docker.internal:8090" + APIHUB_API_KEY: "123123" + ports: + - "8092:8080" + restart: unless-stopped \ No newline at end of file diff --git a/docs/local_development/generate_jwt_pkey.sh b/docs/local_development/generate_jwt_pkey.sh new file mode 100644 index 0000000..386be0c --- /dev/null +++ b/docs/local_development/generate_jwt_pkey.sh @@ -0,0 +1,15 @@ +if [ -f jwt_private_key ] +then + echo "Private key 'jwt_private_key' is already exists, please backup or remove it" + ls -l jwt_private_key +else + openssl genpkey -out rsakey.pem -algorithm RSA -pkeyopt rsa_keygen_bits:2048 + if [ 
-r rsakey.pem ] + then + base64 rsakey.pem | tr -d '\n' > jwt_private_key + else + echo "File is not readable: rsakey.pem" + ls -l rsakey.pem + fi + rm -f rsakey.pem +fi diff --git a/docs/local_development/local_development.md b/docs/local_development/local_development.md new file mode 100644 index 0000000..3c18319 --- /dev/null +++ b/docs/local_development/local_development.md @@ -0,0 +1,159 @@ +# Local Backend development + +This instruction tells how to set up local development. + +Backends (Apihub and Custom service) could be started from IDE/cmd. + +DB and frontend components in docker are required to run full Apihub application except the agent functionality. +There's no way for start agent in docker since k8s API is required. + +## Prerequisites + +### Software installation + +Install necessary software if it was not installed earlie. For more info please see [Newcomer environment setup](../newcomer_env_setup.md) + +### DB + +Run corresponding docker-compose file from /docker-compose/DB folder. +It will start Postgres DB in docker container with predefined credentials and database. So it's ready to connect from Apihub BE. + +At this moment there's no procedure to create NC service DB in one command, so you have to create DB `apihub_nc` manually. +If you use DBeaver you need to connect to PostgreSQL DB first using parameters: +``` +Host=localhost +Username=apihub +Password=APIhub1234 +Port=5432 +``` +Don't forget to check 'Show all databases' to see all DBs. +Then open postgres->Databases and create `apihub_nc` DB with owner 'apihub' (all other params leave as default). 
+ +* To create a corresponding docker image you need to issue a command: + +```bash +docker-compose -f docs/local_development/docker-compose/DB/docker-compose.yml up +``` + +If you have another docker image (usually another DB container from another project) which could intersect with this one then you need to change PostgreSQL port settings and image port mapping in [`DB/docker-compose.yml`](/docs/local_development/docker-compose/DB/docker-compose.yml). Please add two arguments into **command** section ("\-p" and "\") and update port mapping in the **ports** section. Default port number for PostgreSQL is **5433**. + +* To run the image please issue a command below: + +```bash +docker-compose -f docs/local_development/docker-compose/DB/docker-compose.yml run postgres +``` + +Of course, you can perform the actions above with your favorite IDE (Podman Desktop or Rancher Desktop for example). + +Expected result - you will have a PostgreSQL instance running and waiting for a clients. If you do not please try to remove images, restart Docker (Podman Desktop or Rancher Desktop) and try again. If the application is unable to reach PostgreSQL then change you port settings, re-create image and try again. + +## Running backends + +### Apihub + +Apihub backend is a product implementation which should be opensource-ready. + +#### Generate private key + +Apihub contains built-in identity provider and it requires RSA private key as a base secret. + +Run [`generate_jwt_pkey.sh`](generate_jwt_pkey.sh), it will generate file jwt_private_key in the current directory. Paste the value to Apihub BE environment. Please mind that the key must be non-empty. 
+ +#### API hub BE environment + +The following environment variables are required to start Apihub application: + +```INI +LISTEN_ADDRESS=127.0.0.1:8090; +APIHUB_POSTGRESQL_DB_NAME=apihub; +APIHUB_POSTGRESQL_USERNAME=apihub; +APIHUB_POSTGRESQL_PASSWORD=APIhub1234; +APIHUB_POSTGRESQL_PORT=5432; +PRODUCTION_MODE=false; +JWT_PRIVATE_KEY={use generated key here} +``` + +Set these variables to build configuration. + +#### Run API hub + +You can simply run Service.go from apihub-service project or you can try to use [`Dockerfile`](/Dockerfile) at your choice. If you will try to use Dockerfile you have to know about the proper image URL which you need to change in the file. + +### Post-setup + +Since you will run non-production environment you do not have any valid identity instead of internal. You need to perform the actions below to configure internal user in the newly created environment: + +* create local user via `POST /api/internal/users` +* add admin role via `POST /api/internal/users/{userId}/systemRole` +* get local user token via `POST /api/v2/auth/local` + +You can use any of test tools approved by company to send REST API requests. The best request collection can be found in the [`apihub-postman-collections repository`](https:///apihub-postman-collections). And the command above, collection and environment for local development are also included. + +You can use NC-newman-desktop or Bruno app to run REST API requests. + +### Custom service + +Custom service is an Apihub extension with custom logic. 
+ +### Create m2m token + +Create m2m admin token via POST `/api/v2/packages/*/apiKeys` +Asterisk means that the token will work for any package + +### Envs + +```INI +LISTEN_ADDRESS=127.0.0.1:8091; +DB_TYPE=postgres; +APIHUB_POSTGRESQL_HOST=localhost; +APIHUB_POSTGRESQL_PORT=5432; +NC_APIHUB_POSTGRESQL_DB_NAME=apihub_nc; +NC_APIHUB_POSTGRESQL_USERNAME=apihub; +NC_APIHUB_POSTGRESQL_PASSWORD=APIhub1234; +APIHUB_URL=http://127.0.0.1:8090; +APIHUB_ACCESS_TOKEN={use generated token value here}; +``` + +## FE + + +### Run frontend + +Run corresponding docker-compose file from `/docker-compose/FE` folder. +It will start FE container providing you a kind of GUI on localhost:8080 that will connect to Apihub BE on :8090 and NC service on :8091. + +To create a corresponding Docker image you need to issue a command: + +```bash +docker compose -f docs/local_development/docker-compose/FE/docker-compose.yml up +``` + +If default port (8080) was already taken by another application or Docker image you have configure another one in the **ports** section an re-create image with the command above. See [`FE/docker-compose.yml`](/docs/local_development/docker-compose/FE/docker-compose.yml) + +To run the image please issue a command below: + +```bash +docker compose -f docs/local_development/docker-compose/FE/docker-compose.yml run apihub-ui +``` + +Of course, you can perform the actions above with your favorite IDE. + +### Open web view + +#### Create user +First you need to create a local user. +Open NC-newman-desktop or Bruno app and run `POST /api/internal/users` at APIHUB_HOST=localhost:8090 with body: + +`{ +"email":"test@mail.com", +"password":"test" +}` + +#### Open web view +Go to http://localhost:8080 (use other port if you change it) and enter created above credentials. 
+ +#### Fill DB with data if needed +You can fill DB with data: +* download appropriate backup archive from FTP +* extract downloaded archive +* use Restore tool of you favorite application, dBeaver for example with next parameters: format: Directory, Backup file: , Discard object owner = true. \ No newline at end of file diff --git a/docs/minio.md b/docs/minio.md new file mode 100644 index 0000000..4019dea --- /dev/null +++ b/docs/minio.md @@ -0,0 +1,8 @@ +Minio (S3-compatble storage) is used for string relatively big binary files like build results. + +Apihub is accessing minio via API, but you can access it via built-in web UI. + +Console (web ui) access: +https://console.apihub-minio.s3.qubership.org/login + +For credentials ask BE team lead or management(or you can get it from Apihub-BE k8s secrets if you have access). \ No newline at end of file diff --git a/docs/newcomer_env_setup.md b/docs/newcomer_env_setup.md new file mode 100644 index 0000000..2422513 --- /dev/null +++ b/docs/newcomer_env_setup.md @@ -0,0 +1,29 @@ +# Applications to install fo newcomer GO developer + +## 1) Install Git (Git Bash) from software center. +Then ask access to GIT repositories listed in Backend sources section of the [Onboarding](onboarding.md) document. + +## 2) Install and setup Podman Desktop or Rancher Desktop (your choice). +This software you need to run APIHUB locally. For more info please see: [Local development](./local_development/local_development.md). + +### Install Podman Desktop: + + +### To Install Rancher Desktop: +* install Rancher Desktop from the website: https://rancherdesktop.io/; +* install WSL Debian from software center because NC deprecate installation from Microsoft servers; +* run Rancher Desktop and be sure it run without errors +* update WSL if needed (run command in Terminal: wsl --update) +* enable Networking tunnels in Rancher Desktop (Preferences -> WSL -> Network) +Now you can run docker-compose up at appropriate docker-compose.yml file. 
+ +## 3) Install IntelliJ IDEA Ultimate + + +## 4) Install postman + + +## 5) Install pgAdmin or dBeaver +This application you need for local development, when you need to browse and setup Data Base locally from DB backups. For more info please see: [Local development](./local_development/local_development.md). + + diff --git a/docs/onboarding.md b/docs/onboarding.md new file mode 100644 index 0000000..30404ce --- /dev/null +++ b/docs/onboarding.md @@ -0,0 +1 @@ +TODO \ No newline at end of file diff --git a/docs/postgres_useful_queries/apihub-postgresql_dev.sh b/docs/postgres_useful_queries/apihub-postgresql_dev.sh new file mode 100644 index 0000000..bbd6b3d --- /dev/null +++ b/docs/postgres_useful_queries/apihub-postgresql_dev.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +kubectl config set-cluster k8s-server --insecure-skip-tls-verify=true --server=https://k8s-server.qubership.org:6443 +kubectl config set-credentials admin/k8s-server --token="K8S_TOKEN_HERE" +kubectl config set-context context-admin/k8s-server --user=admin/k8s-server --namespace=apihub-postgresql --cluster=k8s-server +kubectl config use-context context-admin/k8s-server + +MAIN_POD_ID=$(kubectl get pods -l pgtype=master -o name 2>/dev/null | grep -Po "(pg-patroni-.*?-.*?-.*?\$)") +echo "main pod = $MAIN_POD_ID" + +kubectl port-forward $MAIN_POD_ID 15432:5432 diff --git a/docs/postgres_useful_queries/db_size.txt b/docs/postgres_useful_queries/db_size.txt new file mode 100644 index 0000000..54c3bc7 --- /dev/null +++ b/docs/postgres_useful_queries/db_size.txt @@ -0,0 +1,47 @@ +Source: https://wiki.postgresql.org/wiki/Disk_Usage + + + +WITH RECURSIVE pg_inherit(inhrelid, inhparent) AS + (select inhrelid, inhparent + FROM pg_inherits + UNION + SELECT child.inhrelid, parent.inhparent + FROM pg_inherit child, pg_inherits parent + WHERE child.inhparent = parent.inhrelid), +pg_inherit_short AS (SELECT * FROM pg_inherit WHERE inhparent NOT IN (SELECT inhrelid FROM pg_inherit)) +SELECT table_schema + , TABLE_NAME + , 
row_estimate + , pg_size_pretty(total_bytes) AS total + , pg_size_pretty(index_bytes) AS INDEX + , pg_size_pretty(toast_bytes) AS toast + , pg_size_pretty(table_bytes) AS TABLE + , total_bytes::float8 / sum(total_bytes) OVER () AS total_size_share + FROM ( + SELECT *, total_bytes-index_bytes-COALESCE(toast_bytes,0) AS table_bytes + FROM ( + SELECT c.oid + , nspname AS table_schema + , relname AS TABLE_NAME + , SUM(c.reltuples) OVER (partition BY parent) AS row_estimate + , SUM(pg_total_relation_size(c.oid)) OVER (partition BY parent) AS total_bytes + , SUM(pg_indexes_size(c.oid)) OVER (partition BY parent) AS index_bytes + , SUM(pg_total_relation_size(reltoastrelid)) OVER (partition BY parent) AS toast_bytes + , parent + FROM ( + SELECT pg_class.oid + , reltuples + , relname + , relnamespace + , pg_class.reltoastrelid + , COALESCE(inhparent, pg_class.oid) parent + FROM pg_class + LEFT JOIN pg_inherit_short ON inhrelid = oid + WHERE relkind IN ('r', 'p') + ) c + LEFT JOIN pg_namespace n ON n.oid = c.relnamespace + ) a + WHERE oid = parent +) a +ORDER BY total_bytes DESC; \ No newline at end of file diff --git a/docs/postgres_useful_queries/get_pg_connections_by_app.txt b/docs/postgres_useful_queries/get_pg_connections_by_app.txt new file mode 100644 index 0000000..dfdb08b --- /dev/null +++ b/docs/postgres_useful_queries/get_pg_connections_by_app.txt @@ -0,0 +1 @@ +select datname, count(datname) from pg_stat_activity group by datname order by count desc; \ No newline at end of file diff --git a/docs/postgres_useful_queries/global_search_postgres.txt b/docs/postgres_useful_queries/global_search_postgres.txt new file mode 100644 index 0000000..2b86bf3 --- /dev/null +++ b/docs/postgres_useful_queries/global_search_postgres.txt @@ -0,0 +1,58 @@ +CREATE OR REPLACE FUNCTION global_search( + search_term text, + param_tables text[] default '{}', + param_schemas text[] default '{public}', + progress text default null -- 'tables','hits','all' +) + RETURNS table(schemaname 
text, tablename text, columnname text, rowctid tid) +AS $$ +declare + query text; + hit boolean; +begin + FOR schemaname,tablename IN + SELECT t.table_schema, t.table_name + FROM information_schema.tables t + JOIN information_schema.schemata s ON + (s.schema_name=t.table_schema) + WHERE (t.table_name=ANY(param_tables) OR param_tables='{}') + AND t.table_schema=ANY(param_schemas) + AND t.table_type='BASE TABLE' + AND EXISTS (SELECT 1 FROM information_schema.table_privileges p + WHERE p.table_name=t.table_name + AND p.table_schema=t.table_schema + AND p.privilege_type='SELECT' + ) + LOOP + IF (progress in ('tables','all')) THEN + raise info '%', format('Searching globally in table: %I.%I', + schemaname, tablename); + END IF; + + query := format('SELECT ctid FROM ONLY %I.%I AS t WHERE strpos(cast(t.* as text), %L) > 0', + schemaname, + tablename, + search_term); + FOR rowctid IN EXECUTE query + LOOP + FOR columnname IN + SELECT column_name + FROM information_schema.columns + WHERE table_name=tablename + AND table_schema=schemaname + LOOP + query := format('SELECT true FROM ONLY %I.%I WHERE cast(%I as text)=%L AND ctid=%L', + schemaname, tablename, columnname, search_term, rowctid); + EXECUTE query INTO hit; + IF hit THEN + IF (progress in ('hits', 'all')) THEN + raise info '%', format('Found in %I.%I.%I at ctid %s', + schemaname, tablename, columnname, rowctid); + END IF; + RETURN NEXT; + END IF; + END LOOP; -- for columnname + END LOOP; -- for rowctid + END LOOP; -- for table +END; +$$ language plpgsql; \ No newline at end of file diff --git a/docs/postman_collections.md b/docs/postman_collections.md new file mode 100644 index 0000000..7ff1c4f --- /dev/null +++ b/docs/postman_collections.md @@ -0,0 +1,3 @@ +# Postman collections +We have postman collections for integration testing. 
+The collections are stored in separate repository: TODO diff --git a/qubership-apihub-service/Service.go b/qubership-apihub-service/Service.go new file mode 100644 index 0000000..3f8f56d --- /dev/null +++ b/qubership-apihub-service/Service.go @@ -0,0 +1,737 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "context" + "io" + "net/http" + _ "net/http/pprof" + "os" + "path" + "runtime/debug" + "strings" + "sync" + "time" + + "gopkg.in/natefinch/lumberjack.v2" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + midldleware "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/middleware" + "github.com/prometheus/client_golang/prometheus/promhttp" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + mController "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/controller" + mRepository "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/repository" + mService "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + + 
"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/security" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + + "github.com/gorilla/handlers" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + prefixed "github.com/x-cray/logrus-prefixed-formatter" +) + +func init() { + basePath := os.Getenv("BASE_PATH") + if basePath == "" { + basePath = "." + } + mw := io.MultiWriter(os.Stderr, &lumberjack.Logger{ + Filename: basePath + "/logs/apihub.log", + MaxSize: 10, // megabytes + }) + log.SetFormatter(&prefixed.TextFormatter{ + DisableColors: true, + TimestampFormat: "2006-01-02 15:04:05", + FullTimestamp: true, + ForceFormatting: true, + }) + logLevel, err := log.ParseLevel(os.Getenv("LOG_LEVEL")) + if err != nil { + logLevel = log.InfoLevel + } + log.SetLevel(logLevel) + log.SetOutput(mw) +} + +func main() { + systemInfoService, err := service.NewSystemInfoService() + if err != nil { + panic(err) + } + basePath := systemInfoService.GetBasePath() + + gitlabUrl := systemInfoService.GetGitlabUrl() + + // Create router and server to expose live and ready endpoints during initialization + readyChan := make(chan bool) + migrationPassedChan := make(chan bool) + initSrvStoppedChan := make(chan bool) + r := mux.NewRouter() + r.Use(midldleware.PrometheusMiddleware) + r.SkipClean(true) + r.UseEncodedPath() + healthController := controller.NewHealthController(readyChan) + r.HandleFunc("/live", healthController.HandleLiveRequest).Methods(http.MethodGet) + r.HandleFunc("/ready", healthController.HandleReadyRequest).Methods(http.MethodGet) + initSrv := makeServer(systemInfoService, 
r) + + creds := systemInfoService.GetCredsFromEnv() + + cp := db.NewConnectionProvider(creds) + + migrationRunRepository := mRepository.NewMigrationRunRepository(cp) + buildCleanupRepository := repository.NewBuildCleanupRepository(cp) + transitionRepository := repository.NewTransitionRepository(cp) + buildResultRepository := repository.NewBuildResultRepository(cp) + publishedRepository, err := repository.NewPublishedRepositoryPG(cp) + if err != nil { + log.Error("Failed to create PublishedRepository: " + err.Error()) + panic("Failed to create PublishedRepository: " + err.Error()) + } + minioStorageCreds := systemInfoService.GetMinioStorageCreds() + minioStorageService := service.NewMinioStorageService(buildResultRepository, publishedRepository, minioStorageCreds) + dbMigrationService, err := mService.NewDBMigrationService(cp, migrationRunRepository, buildCleanupRepository, transitionRepository, systemInfoService, minioStorageService) + if err != nil { + log.Error("Failed create dbMigrationService: " + err.Error()) + panic("Failed create dbMigrationService: " + err.Error()) + } + + go func(initSrvStoppedChan chan bool) { // Do not use safe async here to enable panic + log.Debugf("Starting init srv") + _ = initSrv.ListenAndServe() + log.Debugf("Init srv closed") + initSrvStoppedChan <- true + close(initSrvStoppedChan) + }(initSrvStoppedChan) + + go func(migrationReadyChan chan bool) { // Do not use safe async here to enable panic + passed := <-migrationPassedChan + err := initSrv.Shutdown(context.Background()) + if err != nil { + log.Fatalf("Failed to shutdown initial server") + } + if !passed { + log.Fatalf("Stopping server since migration failed") + } + migrationReadyChan <- true + close(migrationReadyChan) + close(migrationPassedChan) + }(readyChan) + + wg := sync.WaitGroup{} + wg.Add(1) + + go func() { // Do not use safe async here to enable panic + defer wg.Done() + + currentVersion, newVersion, migrationRequired, err := dbMigrationService.Migrate(basePath) + if 
err != nil { + log.Error("Failed perform DB migration: " + err.Error()) + time.Sleep(time.Second * 10) // Give a chance to read the unrecoverable error + panic("Failed perform DB migration: " + err.Error()) + } + err = dbMigrationService.SoftMigrateDb(currentVersion, newVersion, migrationRequired) + if err != nil { + log.Errorf("Failed to perform db migrations: %v", err.Error()) + time.Sleep(time.Second * 10) // Give a chance to read the unrecoverable error + panic("Failed to perform db migrations: " + err.Error()) + } + + migrationPassedChan <- true + }() + + wg.Wait() + _ = <-initSrvStoppedChan // wait for the init srv to stop to avoid multiple servers started race condition + log.Infof("Migration step passed, continue initialization") + + gitIntegrationRepository, err := repository.NewGitIntegrationRepositoryPG(cp) + if err != nil { + log.Error("Failed to create UserIntegrationsRepository: " + err.Error()) + panic("Failed to create UserIntegrationsRepository: " + err.Error()) + } + + projectRepository, err := repository.NewPrjGrpIntRepositoryPG(cp) + if err != nil { + log.Error("Failed to create PrjGrpIntRepository: " + err.Error()) + panic("Failed to create PrjGrpIntRepository: " + err.Error()) + } + + draftRepository, err := repository.NewDraftRepositoryPG(cp) + if err != nil { + log.Error("Failed to create DraftRepository: " + err.Error()) + panic("Failed to create DraftRepository: " + err.Error()) + } + + favoritesRepository, err := repository.NewFavoritesRepositoryPG(cp) + if err != nil { + log.Error("Failed to create FavoriteRepository: " + err.Error()) + panic("Failed to create FavoriteRepository: " + err.Error()) + } + + usersRepository, err := repository.NewUserRepositoryPG(cp) + if err != nil { + log.Error("Failed to create UsersRepository: " + err.Error()) + panic("Failed to create UsersRepository: " + err.Error()) + } + apihubApiKeyRepository, err := repository.NewApihubApiKeyRepositoryPG(cp) + if err != nil { + log.Error("Failed to create 
ApihubApiKeyRepository: " + err.Error()) + panic("Failed to create ApihubApiKeyRepository: " + err.Error()) + } + branchRepository, err := repository.NewBranchRepositoryPG(cp) + if err != nil { + log.Error("Failed to create BranchRepository: " + err.Error()) + panic("Failed to create BranchRepository: " + err.Error()) + } + buildRepository, err := repository.NewBuildRepositoryPG(cp) + if err != nil { + log.Error("Failed to create BuildRepository: " + err.Error()) + panic("Failed to create BuildRepository: " + err.Error()) + } + + roleRepository := repository.NewRoleRepository(cp) + operationRepository := repository.NewOperationRepository(cp) + agentRepository := repository.NewAgentRepository(cp) + businessMetricRepository := repository.NewBusinessMetricRepository(cp) + + activityTrackingRepository := repository.NewActivityTrackingRepository(cp) + + versionCleanupRepository := repository.NewVersionCleanupRepository(cp) + + olricProvider, err := cache.NewOlricProvider() + if err != nil { + log.Error("Failed to create olricProvider: " + err.Error()) + panic("Failed to create olricProvider: " + err.Error()) + } + + configs := []client.GitClientConfiguration{{Integration: view.GitlabIntegration, BaseUrl: gitlabUrl}} + tokenRevocationHandler := service.NewTokenRevocationHandler(gitIntegrationRepository, olricProvider) + tokenExpirationHandler := service.NewTokenExpirationHandler(gitIntegrationRepository, olricProvider, systemInfoService) + gitClientProvider, err := service.NewGitClientProvider(configs, gitIntegrationRepository, tokenRevocationHandler, tokenExpirationHandler, olricProvider) + if err != nil { + log.Error("Failed to create GitClientProvider: " + err.Error()) + panic("Failed to create GitClientProvider: " + err.Error()) + } + privateUserPackageService := service.NewPrivateUserPackageService(publishedRepository, usersRepository, roleRepository, favoritesRepository) + integrationsService := service.NewIntegrationsService(gitIntegrationRepository, 
gitClientProvider) + userService := service.NewUserService(usersRepository, gitClientProvider, systemInfoService, privateUserPackageService) + + projectService := service.NewProjectService(gitClientProvider, projectRepository, favoritesRepository, publishedRepository) + groupService := service.NewGroupService(projectRepository, projectService, favoritesRepository, publishedRepository, usersRepository) + + wsLoadBalancer, err := service.NewWsLoadBalancer(olricProvider) + if err != nil { + log.Error("Failed to create wsLoadBalancer: " + err.Error()) + panic("Failed to create wsLoadBalancer: " + err.Error()) + } + + templateService := service.NewTemplateService() + + cleanupService := service.NewCleanupService(cp) + monitoringService := service.NewMonitoringService(cp) + packageVersionEnrichmentService := service.NewPackageVersionEnrichmentService(publishedRepository) + activityTrackingService := service.NewActivityTrackingService(activityTrackingRepository, publishedRepository, userService) + operationService := service.NewOperationService(operationRepository, publishedRepository, packageVersionEnrichmentService) + roleService := service.NewRoleService(roleRepository, userService, activityTrackingService, publishedRepository) + wsBranchService := service.NewWsBranchService(userService, wsLoadBalancer) + branchEditorsService := service.NewBranchEditorsService(userService, wsBranchService, branchRepository, olricProvider) + branchService := service.NewBranchService(projectService, draftRepository, gitClientProvider, publishedRepository, wsBranchService, branchEditorsService, branchRepository) + projectFilesService := service.NewProjectFilesService(gitClientProvider, projectRepository, branchService) + ptHandler := service.NewPackageTransitionHandler(transitionRepository) + publishedService := service.NewPublishedService(branchService, publishedRepository, projectRepository, buildRepository, gitClientProvider, wsBranchService, favoritesRepository, operationRepository, 
activityTrackingService, monitoringService, minioStorageService, systemInfoService) + contentService := service.NewContentService(draftRepository, projectService, branchService, gitClientProvider, wsBranchService, templateService, systemInfoService) + refService := service.NewRefService(draftRepository, projectService, branchService, publishedRepository, wsBranchService) + wsFileEditService := service.NewWsFileEditService(userService, contentService, branchEditorsService, wsLoadBalancer) + portalService := service.NewPortalService(basePath, publishedService, publishedRepository, projectRepository) + operationGroupService := service.NewOperationGroupService(operationRepository, publishedRepository, packageVersionEnrichmentService, activityTrackingService) + versionService := service.NewVersionService(gitClientProvider, projectRepository, favoritesRepository, publishedRepository, publishedService, operationRepository, operationService, activityTrackingService, systemInfoService, packageVersionEnrichmentService, portalService, versionCleanupRepository, operationGroupService) + packageService := service.NewPackageService(gitClientProvider, projectRepository, favoritesRepository, publishedRepository, versionService, roleService, activityTrackingService, operationGroupService, usersRepository, ptHandler, systemInfoService) + + logsService := service.NewLogsService() + internalWebsocketService := service.NewInternalWebsocketService(wsLoadBalancer, olricProvider) + commitService := service.NewCommitService(draftRepository, contentService, branchService, projectService, gitClientProvider, wsBranchService, wsFileEditService, branchEditorsService) + searchService := service.NewSearchService(projectService, publishedService, branchService, gitClientProvider, contentService) + apihubApiKeyService := service.NewApihubApiKeyService(apihubApiKeyRepository, publishedRepository, activityTrackingService, userService, roleRepository, roleService.IsSysadm) + + refResolverService := 
service.NewRefResolverService(publishedRepository) + buildProcessorService := service.NewBuildProcessorService(buildRepository, refResolverService) + buildService := service.NewBuildService(buildRepository, buildProcessorService, publishedService, systemInfoService, packageService, refResolverService) + buildResultService := service.NewBuildResultService(buildResultRepository, systemInfoService, minioStorageService) + versionService.SetBuildService(buildService) + operationGroupService.SetBuildService(buildService) + + agentService := service.NewAgentRegistrationService(agentRepository) + excelService := service.NewExcelService(publishedRepository, versionService, operationService, packageService) + comparisonService := service.NewComparisonService(publishedRepository, operationRepository, packageVersionEnrichmentService) + businessMetricService := service.NewBusinessMetricService(businessMetricRepository) + + dbCleanupService := service.NewDBCleanupService(buildCleanupRepository, migrationRunRepository, minioStorageService, systemInfoService) + if err := dbCleanupService.CreateCleanupJob(systemInfoService.GetBuildsCleanupSchedule()); err != nil { + log.Error("Failed to start cleaning job" + err.Error()) + } + + transitionService := service.NewTransitionService(transitionRepository, publishedRepository) + transformationService := service.NewTransformationService(publishedRepository, operationRepository) + + gitHookService := service.NewGitHookService(projectRepository, branchService, buildService, userService) + + zeroDayAdminService := service.NewZeroDayAdminService(userService, roleService, usersRepository) + + integrationsController := controller.NewIntegrationsController(integrationsService) + projectController := controller.NewProjectController(projectService, groupService, searchService) + branchController := controller.NewBranchController(branchService, commitService, projectFilesService, searchService, publishedService, branchEditorsService, 
wsBranchService) + groupController := controller.NewGroupController(groupService, publishedService, roleService) + contentController := controller.NewContentController(contentService, branchService, searchService, wsFileEditService, wsBranchService, systemInfoService) + publishedController := controller.NewPublishedController(publishedService, portalService, searchService) + refController := controller.NewRefController(refService, wsBranchService) + branchWSController := controller.NewBranchWSController(branchService, wsLoadBalancer, internalWebsocketService) + fileWSController := controller.NewFileWSController(wsFileEditService, wsLoadBalancer, internalWebsocketService) + + logsController := controller.NewLogsController(logsService, roleService) + systemInfoController := controller.NewSystemInfoController(systemInfoService) + sysAdminController := controller.NewSysAdminController(roleService) + apihubApiKeyController := controller.NewApihubApiKeyController(apihubApiKeyService, roleService) + cleanupController := controller.NewCleanupController(cleanupService) + + agentClient := client.NewAgentClient() + agentController := controller.NewAgentController(agentService, agentClient) + agentProxyController := controller.NewAgentProxyController(agentService, systemInfoService) + playgroundProxyController := controller.NewPlaygroundProxyController(systemInfoService) + publishV2Controller := controller.NewPublishV2Controller(buildService, publishedService, buildResultService, roleService, systemInfoService) + exportController := controller.NewExportController(publishedService, portalService, searchService, roleService, excelService, versionService, monitoringService) + + packageController := controller.NewPackageController(packageService, publishedService, portalService, searchService, roleService, monitoringService, ptHandler) + versionController := controller.NewVersionController(versionService, roleService, monitoringService, ptHandler, roleService.IsSysadm) + 
roleController := controller.NewRoleController(roleService) + samlAuthController := security.NewSamlAuthController(userService, systemInfoService) + userController := controller.NewUserController(userService, privateUserPackageService, roleService.IsSysadm) + jwtPubKeyController := security.NewJwtPubKeyController() + oauthController := security.NewOauth20Controller(integrationsService, userService, systemInfoService) + operationController := controller.NewOperationController(roleService, operationService, buildService, monitoringService, ptHandler) + operationGroupController := controller.NewOperationGroupController(roleService, operationGroupService, versionService) + searchController := controller.NewSearchController(operationService, versionService, monitoringService) + tempMigrationController := mController.NewTempMigrationController(dbMigrationService, roleService.IsSysadm) + activityTrackingController := controller.NewActivityTrackingController(activityTrackingService, roleService, ptHandler) + comparisonController := controller.NewComparisonController(operationService, versionService, buildService, roleService, comparisonService, monitoringService, ptHandler) + buildCleanupController := controller.NewBuildCleanupController(dbCleanupService, roleService.IsSysadm) + transitionController := controller.NewTransitionController(transitionService, roleService.IsSysadm) + businessMetricController := controller.NewBusinessMetricController(businessMetricService, excelService, roleService.IsSysadm) + + apiDocsController := controller.NewApiDocsController(basePath) + + transformationController := controller.NewTransformationController(roleService, buildService, versionService, transformationService, operationGroupService) + + minioStorageController := controller.NewMinioStorageController(minioStorageCreds, minioStorageService) + + gitHookController := controller.NewGitHookController(gitHookService) + + r.HandleFunc("/api/v1/integrations/{integrationId}/apikey", 
security.Secure(integrationsController.GetUserApiKeyStatus)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/integrations/{integrationId}/apikey", security.Secure(integrationsController.SetUserApiKey)).Methods(http.MethodPut) + r.HandleFunc("/api/v1/integrations/{integrationId}/repositories", security.Secure(integrationsController.ListRepositories)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/integrations/{integrationId}/repositories/{repositoryId}/branches", security.Secure(integrationsController.ListBranchesAndTags)).Methods(http.MethodGet) + + r.HandleFunc("/api/v1/projects", security.Secure(projectController.GetFilteredProjects)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects", security.Secure(projectController.AddProject)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}", security.Secure(projectController.GetProject)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}", security.Secure(projectController.UpdateProject)).Methods(http.MethodPut) + r.HandleFunc("/api/v1/projects/{projectId}", security.Secure(projectController.DeleteProject)).Methods(http.MethodDelete) + r.HandleFunc("/api/v1/projects/{projectId}/favor", security.Secure(projectController.FavorProject)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/disfavor", security.Secure(projectController.DisfavorProject)).Methods(http.MethodPost) + + r.HandleFunc("/api/v1/groups", security.Secure(groupController.AddGroup)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/groups/{groupId}", security.Secure(groupController.GetGroupInfo)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/groups", security.Secure(groupController.GetAllGroups)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/groups/{groupId}/favor", security.Secure(groupController.FavorGroup)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/groups/{groupId}/disfavor", security.Secure(groupController.DisfavorGroup)).Methods(http.MethodPost) + + 
r.HandleFunc("/api/v1/projects/{projectId}/branches", security.Secure(branchController.GetProjectBranches)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}", security.Secure(branchController.GetProjectBranchDetails)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/config", security.Secure(branchController.GetProjectBranchConfigRaw)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/save", security.Secure(branchController.CommitBranchDraftChanges)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/zip", security.Secure(branchController.GetProjectBranchContentZip)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/integration/files", security.Secure(branchController.GetProjectBranchFiles)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/history", security.Secure(branchController.GetProjectBranchCommitHistory_deprecated)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}", security.Secure(branchController.DeleteBranch)).Methods(http.MethodDelete) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/clone", security.Secure(branchController.CloneBranch)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/reset", security.Secure(branchController.DeleteBranchDraft)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/conflicts", security.Secure(branchController.GetBranchConflicts)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/integration/files", security.Secure(contentController.AddFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/refs", 
security.Secure(refController.UpdateRefs)).Methods(http.MethodPatch) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/editors", security.Secure(branchController.AddBranchEditor)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/editors", security.Secure(branchController.RemoveBranchEditor)).Methods(http.MethodDelete) + + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}", security.Secure(contentController.GetContent)).Methods(http.MethodGet) //deprecated??? + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/file", security.Secure(contentController.GetContentAsFile)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}", security.Secure(contentController.UpdateContent)).Methods(http.MethodPut) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/upload", security.Secure(contentController.UploadContent)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/history", security.Secure(contentController.GetContentHistory)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/history/{commitId}", security.Secure(contentController.GetContentFromCommit)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/blobs/{blobId}", security.Secure(contentController.GetContentFromBlobId)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/rename", security.Secure(contentController.MoveFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/reset", security.Secure(contentController.ResetFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/restore", 
security.Secure(contentController.RestoreFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}", security.Secure(contentController.DeleteFile)).Methods(http.MethodDelete) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/files/{fileId}/meta", security.Secure(contentController.UpdateMetadata)).Methods(http.MethodPatch) + r.HandleFunc("/api/v1/projects/{projectId}/branches/{branchName}/allfiles", security.Secure(contentController.GetAllContent)).Methods(http.MethodGet) + + r.HandleFunc("/api/v1/projects/{packageId}/versions/{version}", security.Secure(publishedController.GetVersion)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{packageId}/versions/{version}/documentation", security.Secure(publishedController.GenerateVersionDocumentation)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{packageId}/versions/{version}/files/{slug}/documentation", security.Secure(publishedController.GenerateFileDocumentation)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/projects/{packageId}/versions/{version}/files/{fileSlug}/share", security.Secure(publishedController.SharePublishedFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/shared/{shared_id}", security.NoSecure(publishedController.GetSharedContentFile)).Methods(http.MethodGet) + + r.HandleFunc("/api/v1/system/info", security.Secure(systemInfoController.GetSystemInfo)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/system/configuration", samlAuthController.GetSystemSSOInfo).Methods(http.MethodGet) + + r.HandleFunc("/api/v1/projects/{projectId}/versions/{version}/files/{fileSlug}/share", security.Secure(publishedController.SharePublishedFile)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/debug/logs", security.Secure(logsController.StoreLogs)).Methods(http.MethodPut) + r.HandleFunc("/api/v1/debug/logs/setLevel", security.Secure(logsController.SetLogLevel)).Methods(http.MethodPost) + 
r.HandleFunc("/api/v1/debug/logs/checkLevel", security.Secure(logsController.CheckLogLevel)).Methods(http.MethodGet) + + //websocket + r.HandleFunc("/ws/v1/projects/{projectId}/branches/{branchName}", security.SecureWebsocket(branchWSController.ConnectToProjectBranch)) + r.HandleFunc("/ws/v1/projects/{projectId}/branches/{branchName}/files/{fileId}", security.SecureWebsocket(fileWSController.ConnectToFile)) + + //Search + r.HandleFunc("/api/v2/search/{searchLevel}", security.Secure(searchController.Search_deprecated)).Methods(http.MethodPost) //deprecated + r.HandleFunc("/api/v3/search/{searchLevel}", security.Secure(searchController.Search)).Methods(http.MethodPost) + + r.HandleFunc("/api/v2/builders/{builderId}/tasks", security.Secure(publishV2Controller.GetFreeBuild)).Methods(http.MethodPost) + + r.HandleFunc("/api/v2/packages", security.Secure(packageController.CreatePackage)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}", security.Secure(packageController.UpdatePackage)).Methods(http.MethodPatch) + r.HandleFunc("/api/v2/packages/{packageId}", security.Secure(packageController.DeletePackage)).Methods(http.MethodDelete) + r.HandleFunc("/api/v2/packages/{packageId}/favor", security.Secure(packageController.FavorPackage)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/disfavor", security.Secure(packageController.DisfavorPackage)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}", security.Secure(packageController.GetPackage)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/status", security.Secure(packageController.GetPackageStatus)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages", security.Secure(packageController.GetPackagesList)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/publish/availableStatuses", security.Secure(packageController.GetAvailableVersionStatusesForPublish)).Methods(http.MethodGet) + + 
r.HandleFunc("/api/v2/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.GetApiKeys_deprecated)).Methods(http.MethodGet) + r.HandleFunc("/api/v3/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.GetApiKeys_v3_deprecated)).Methods(http.MethodGet) + r.HandleFunc("/api/v4/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.GetApiKeys)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.CreateApiKey_deprecated)).Methods(http.MethodPost) + r.HandleFunc("/api/v3/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.CreateApiKey_v3_deprecated)).Methods(http.MethodPost) + r.HandleFunc("/api/v4/packages/{packageId}/apiKeys", security.Secure(apihubApiKeyController.CreateApiKey)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/apiKeys/{id}", security.Secure(apihubApiKeyController.RevokeApiKey)).Methods(http.MethodDelete) + + r.HandleFunc("/api/v2/packages/{packageId}/members", security.Secure(roleController.GetPackageMembers)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/members", security.Secure(roleController.AddPackageMembers)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/members/{userId}", security.Secure(roleController.UpdatePackageMembers)).Methods(http.MethodPatch) + r.HandleFunc("/api/v2/packages/{packageId}/members/{userId}", security.Secure(roleController.DeletePackageMember)).Methods(http.MethodDelete) + + r.HandleFunc("/api/v2/packages/{packageId}/recalculateGroups", security.Secure(packageController.RecalculateOperationGroups)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/calculateGroups", security.Secure(packageController.CalculateOperationGroups)).Methods(http.MethodGet) + + //api for agent + r.HandleFunc("/api/v2/users/{userId}/availablePackagePromoteStatuses", 
security.Secure(roleController.GetAvailableUserPackagePromoteStatuses)).Methods(http.MethodPost) + + r.HandleFunc("/api/v2/packages/{packageId}/publish/{publishId}/status", security.Secure(publishV2Controller.GetPublishStatus)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/publish/statuses", security.Secure(publishV2Controller.GetPublishStatuses)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/publish", security.Secure(publishV2Controller.Publish)).Methods(http.MethodPost) + r.HandleFunc("/api/v2/packages/{packageId}/publish/{publishId}/status", security.Secure(publishV2Controller.SetPublishStatus_deprecated)).Methods(http.MethodPost) + r.HandleFunc("/api/v3/packages/{packageId}/publish/{publishId}/status", security.Secure(publishV2Controller.SetPublishStatus)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/packages/{packageId}/publish/withOperationsGroup", security.Secure(versionController.PublishFromCSV)).Methods(http.MethodPost) + r.HandleFunc("/api/v1/packages/{packageId}/publish/{publishId}/withOperationsGroup/status", security.Secure(versionController.GetCSVDashboardPublishStatus)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/packages/{packageId}/publish/{publishId}/withOperationsGroup/report", security.Secure(versionController.GetCSVDashboardPublishReport)).Methods(http.MethodGet) + + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}", security.Secure(versionController.GetPackageVersionContent_deprecated)).Methods(http.MethodGet) + r.HandleFunc("/api/v3/packages/{packageId}/versions/{version}", security.Secure(versionController.GetPackageVersionContent)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/versions", security.Secure(versionController.GetPackageVersionsList_deprecated)).Methods(http.MethodGet) + r.HandleFunc("/api/v3/packages/{packageId}/versions", security.Secure(versionController.GetPackageVersionsList)).Methods(http.MethodGet) + 
r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}", security.Secure(versionController.DeleteVersion)).Methods(http.MethodDelete) + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}", security.Secure(versionController.PatchVersion)).Methods(http.MethodPatch) + r.HandleFunc("/api/v2/packages/{packageId}/versions/recursiveDelete", security.Secure(versionController.DeleteVersionsRecursively)).Methods(http.MethodPost) + + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}/files/{slug}/raw", security.Secure(versionController.GetVersionedContentFileRaw)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/sharedFiles/{sharedFileId}", security.NoSecure(versionController.GetSharedContentFile)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}/changes", security.Secure(versionController.GetVersionChanges)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}/problems", security.Secure(versionController.GetVersionProblems)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/sharedFiles", security.Secure(versionController.SharePublishedFile)).Methods(http.MethodPost) + + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}/doc", security.Secure(exportController.GenerateVersionDoc)).Methods(http.MethodGet) + r.HandleFunc("/api/v2/packages/{packageId}/versions/{version}/files/{slug}/doc", security.Secure(exportController.GenerateFileDoc)).Methods(http.MethodGet) + + r.HandleFunc("/api/v2/auth/saml", security.NoSecure(samlAuthController.StartSamlAuthentication)).Methods(http.MethodGet) // deprecated. 
+ r.HandleFunc("/login/sso/saml", security.NoSecure(samlAuthController.StartSamlAuthentication)).Methods(http.MethodGet) + r.HandleFunc("/saml/acs", security.NoSecure(samlAuthController.AssertionConsumerHandler)).Methods(http.MethodPost) + r.HandleFunc("/saml/metadata", security.NoSecure(samlAuthController.ServeMetadata)).Methods(http.MethodGet) + + // Required for agent to verify apihub tokens + r.HandleFunc("/api/v2/auth/publicKey", security.NoSecure(jwtPubKeyController.GetRsaPublicKey)).Methods(http.MethodGet) + // Required to verify api key for external authorization + r.HandleFunc("/api/v2/auth/apiKey", security.NoSecure(apihubApiKeyController.GetApiKeyByKey)).Methods(http.MethodGet) + r.HandleFunc("/api/v1/auth/apiKey/{apiKeyId}", security.Secure(apihubApiKeyController.GetApiKeyById)).Methods(http.MethodGet) + + r.HandleFunc("/api/v2/users/{userId}/profile/avatar", security.NoSecure(userController.GetUserAvatar)).Methods(http.MethodGet) // Should not be secured! FE renders avatar as 0 { + docLabels := documentMetadata.GetStringArray("labels") + if len(docLabels) > 0 { + fileEntMetadata.SetLabels(docLabels) + } + docBlobId := documentMetadata.GetStringValue("blobId") + if docBlobId != "" { + fileEntMetadata.SetBlobId(docBlobId) + } + docInfo := documentMetadata.GetObject("info") + if docInfo != nil { + fileEntMetadata.SetInfo(docInfo) + } + docExternalDocs := documentMetadata.GetObject("externalDocs") + if docExternalDocs != nil { + fileEntMetadata.SetExternalDocs(docExternalDocs) + } + + tags, err := documentMetadata.GetObjectArray("tags") + if err != nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": document.Slug, "error": err.Error()}, + } + } + if tags != nil { + fileEntMetadata.SetDocTags(tags) + } + } + index := i + if a.PackageInfo.MigrationBuild { + index = documentMetadata.GetIntValue("index") + } 
+ fileEntities = append(fileEntities, &entity.PublishedContentEntity{ + PackageId: a.PackageInfo.PackageId, + Version: a.PackageInfo.Version, + Revision: a.PackageInfo.Revision, + FileId: document.FileId, + Checksum: checksum, + Index: index, + Slug: document.Slug, + Name: name, + Path: path, + DataType: document.Type, + Format: document.Format, + Title: document.Title, + Metadata: fileEntMetadata, + OperationIds: document.OperationIds, + Filename: document.Filename, + }) + fileDataEntities = append(fileDataEntities, &entity.PublishedContentDataEntity{ + PackageId: a.PackageInfo.PackageId, + Checksum: checksum, + MediaType: mediaType, + Data: fileData, + }) + } + } + log.Debugf("Zip documents reading time: %vms", time.Since(filesFromZipReadStart).Milliseconds()) + return fileEntities, fileDataEntities, nil +} + +func (a *BuildResultToEntitiesReader) ReadTransformedDocumentsToEntity() (*entity.TransformedContentDataEntity, error) { + var data []byte + if a.PackageInfo.BuildType == view.MergedSpecificationType { + if len(a.PackageDocuments.Documents) != 1 { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: map[string]interface{}{ + "file": "documents", + "error": fmt.Sprintf("expected exactly 1 document for '%v' buildType, documents: %v", a.PackageInfo.BuildType, len(a.PackageDocuments.Documents)), + }, + } + } + document := a.PackageDocuments.Documents[0] + if fileHeader, exists := a.DocumentsHeaders[document.Filename]; exists { + fileData, err := ReadZipFile(fileHeader) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: map[string]interface{}{"file": document.Slug, "error": err.Error()}, + } + } + data = fileData + } + } else { + zipBuf := bytes.Buffer{} + zw := zip.NewWriter(&zipBuf) + for _, 
document := range a.PackageDocuments.Documents { + if fileHeader, exists := a.DocumentsHeaders[document.Filename]; exists { + fileData, err := ReadZipFile(fileHeader) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: map[string]interface{}{"file": document.Slug, "error": err.Error()}, + } + } + err = AddFileToZip(zw, document.Filename, fileData) + if err != nil { + return nil, err + } + } + } + err := zw.Close() + if err != nil { + return nil, err + } + data = zipBuf.Bytes() + } + format := a.PackageInfo.Format + if format == "" { + format = string(view.JsonDocumentFormat) + } + return &entity.TransformedContentDataEntity{ + PackageId: a.PackageInfo.PackageId, + Version: a.PackageInfo.Version, + Revision: a.PackageInfo.Revision, + ApiType: a.PackageInfo.ApiType, + BuildType: a.PackageInfo.BuildType, + Format: format, + GroupId: view.MakeOperationGroupId(a.PackageInfo.PackageId, a.PackageInfo.Version, a.PackageInfo.Revision, a.PackageInfo.ApiType, a.PackageInfo.GroupName), + Data: data, + DocumentsInfo: a.PackageDocuments.Documents, + }, nil +} + +func (a *BuildResultToEntitiesReader) ReadOperationsToEntities() ([]*entity.OperationEntity, []*entity.OperationDataEntity, error) { + operationsFromZipReadStart := time.Now() + operationEntities := make([]*entity.OperationEntity, 0) + operationDataEntities := make([]*entity.OperationDataEntity, 0) + operationsExternalMetadataMap := a.calculateOperationsExternalMetadataMap() + for _, operation := range a.PackageOperations.Operations { + if fileHeader, exists := a.OperationFileHeaders[operation.OperationId]; exists { + fileData, err := ReadZipFile(fileHeader) + if err != nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: 
map[string]interface{}{"file": operation.OperationId, "error": err.Error()}, + } + } + metadata := entity.Metadata{} + var operationMetadata entity.Metadata = operation.Metadata + var customTags map[string]interface{} + switch operation.ApiType { + case string(view.RestApiType): + if len(operation.Tags) > 0 { + metadata.SetTags(operation.Tags) + } + metadata.SetPath(operationMetadata.GetStringValue("path")) + metadata.SetMethod(operationMetadata.GetStringValue("method")) + case string(view.GraphqlApiType): + if len(operation.Tags) > 0 { + metadata.SetTags(operation.Tags) + } + metadata.SetType(operationMetadata.GetStringValue("type")) + metadata.SetMethod(operationMetadata.GetStringValue("method")) + case string(view.ProtobufApiType): + metadata.SetType(operationMetadata.GetStringValue("type")) + metadata.SetMethod(operationMetadata.GetStringValue("method")) + } + + customTags, err = operationMetadata.GetMapStringToInterface("customTags") + if err != nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "operations.json", "error": fmt.Sprintf("Unable to process field 'customTags' value '%s': %s", + operationMetadata.GetObject("customTags"), err.Error())}, + } + } + operationExternalMetadataKey := view.OperationExternalMetadataKey{ + ApiType: operation.ApiType, + Method: strings.ToLower(metadata.GetMethod()), + Path: operationMetadata.GetStringValue("originalPath"), + } + operationExternalMetadata := operationsExternalMetadataMap[operationExternalMetadataKey] + + if len(operationExternalMetadata) != 0 && customTags == nil { + customTags = make(map[string]interface{}) + } + + for k, v := range operationExternalMetadata { + customTags[k] = v + } + + operationEntities = append(operationEntities, &entity.OperationEntity{ + PackageId: a.PackageInfo.PackageId, + Version: a.PackageInfo.Version, + Revision: 
a.PackageInfo.Revision, + OperationId: operation.OperationId, + DataHash: operation.DataHash, + Deprecated: operation.Deprecated, + Kind: operation.ApiKind, + Type: operation.ApiType, + Title: operation.Title, + Metadata: metadata, + DeprecatedItems: operation.DeprecatedItems, + DeprecatedInfo: operation.DeprecatedInfo, + PreviousReleaseVersions: operation.PreviousReleaseVersions, + Models: operation.Models, + CustomTags: customTags, + ApiAudience: operation.ApiAudience, + }) + operationDataEntities = append(operationDataEntities, &entity.OperationDataEntity{ + DataHash: operation.DataHash, + Data: fileData, + SearchScope: operation.SearchScopes, + }) + } + } + log.Debugf("Zip operations reading time: %vms", time.Since(operationsFromZipReadStart).Milliseconds()) + return operationEntities, operationDataEntities, nil +} + +func (a *BuildResultToEntitiesReader) ReadOperationComparisonsToEntities() ([]*entity.VersionComparisonEntity, []*entity.OperationComparisonEntity, []string, error) { + versionComparisonEntities := make([]*entity.VersionComparisonEntity, 0) + operationComparisonEntities := make([]*entity.OperationComparisonEntity, 0) + versionComparisonsFromCache := make([]string, 0) + var mainVersionComparison *entity.VersionComparisonEntity + mainVersionRefs := make([]string, 0) + for _, comparison := range a.PackageComparisons.Comparisons { + versionComparisonEnt := &entity.VersionComparisonEntity{} + mainVersion := false + if comparison.Version != "" { + //check if comparison's current version is a version that is being published + if (a.PackageInfo.Revision == comparison.Revision || comparison.Revision == 0) && + a.PackageInfo.Version == comparison.Version && + a.PackageInfo.PackageId == comparison.PackageId { + mainVersion = true + mainVersionComparison = versionComparisonEnt + versionComparisonEnt.PackageId = comparison.PackageId + versionComparisonEnt.Version = a.PackageInfo.Version + versionComparisonEnt.Revision = a.PackageInfo.Revision + } else { + 
versionComparisonEnt.PackageId = comparison.PackageId + versionComparisonEnt.Version = comparison.Version + versionComparisonEnt.Revision = comparison.Revision + } + } + if comparison.PreviousVersion != "" { + versionComparisonEnt.PreviousPackageId = comparison.PreviousVersionPackageId + versionComparisonEnt.PreviousVersion = comparison.PreviousVersion + versionComparisonEnt.PreviousRevision = comparison.PreviousVersionRevision + } + versionComparisonEnt.NoContent = false + versionComparisonEnt.LastActive = time.Now() + versionComparisonEnt.OperationTypes = comparison.OperationTypes + versionComparisonEnt.BuilderVersion = a.PackageInfo.BuilderVersion + versionComparisonEnt.ComparisonId = view.MakeVersionComparisonId( + versionComparisonEnt.PackageId, + versionComparisonEnt.Version, + versionComparisonEnt.Revision, + versionComparisonEnt.PreviousPackageId, + versionComparisonEnt.PreviousVersion, + versionComparisonEnt.PreviousRevision) + if !mainVersion { + mainVersionRefs = append(mainVersionRefs, versionComparisonEnt.ComparisonId) + } + if comparison.FromCache { + versionComparisonsFromCache = append(versionComparisonsFromCache, versionComparisonEnt.ComparisonId) + continue + } + versionComparisonEntities = append(versionComparisonEntities, versionComparisonEnt) + if comparison.ComparisonFileId == "" { + continue + } + if fileHeader, exists := a.ComparisonsFileHeaders[comparison.ComparisonFileId]; exists { + fileData, err := ReadZipFile(fileHeader) + if err != nil { + return nil, nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: map[string]interface{}{"file": comparison.ComparisonFileId, "error": err.Error()}, + } + } + var operationChanges view.PackageOperationChanges + err = json.Unmarshal(fileData, &operationChanges) + if err != nil { + return nil, nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: 
exception.InvalidPackageArchivedFile, + Message: exception.InvalidPackageArchivedFileMsg, + Params: map[string]interface{}{"file": comparison.ComparisonFileId, "error": "failed to unmarshal operation changes"}, + Debug: err.Error(), + } + } + validationErr := utils.ValidateObject(operationChanges) + if validationErr != nil { + return nil, nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": comparison.ComparisonFileId, "error": validationErr.Error()}, + } + } + for _, operationComparison := range operationChanges.OperationComparisons { + //todo maybe check that changedOperation.OperationId really exists in this package or in our db + operationComparisonEntities = append(operationComparisonEntities, + &entity.OperationComparisonEntity{ + PackageId: versionComparisonEnt.PackageId, + Version: versionComparisonEnt.Version, + Revision: versionComparisonEnt.Revision, + PreviousPackageId: versionComparisonEnt.PreviousPackageId, + PreviousVersion: versionComparisonEnt.PreviousVersion, + PreviousRevision: versionComparisonEnt.PreviousRevision, + ComparisonId: versionComparisonEnt.ComparisonId, + OperationId: operationComparison.OperationId, + DataHash: operationComparison.DataHash, + PreviousDataHash: operationComparison.PreviousDataHash, + ChangesSummary: operationComparison.ChangeSummary, + Changes: map[string]interface{}{"changes": operationComparison.Changes}, + }) + } + } + } + if len(versionComparisonEntities) > 0 && mainVersionComparison == nil { + return nil, nil, nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "comparisons", "error": "comparison for a version specified in package info not found"}, + } + } + if mainVersionComparison != nil { + mainVersionComparison.Refs = mainVersionRefs + } + 
return versionComparisonEntities, operationComparisonEntities, versionComparisonsFromCache, nil +} + +func (a *BuildResultToEntitiesReader) ReadBuilderNotificationsToEntities(publishId string) []*entity.BuilderNotificationsEntity { + builderNotificationsEntities := make([]*entity.BuilderNotificationsEntity, 0) + for _, builderNotifications := range a.BuilderNotifications.Notifications { + builderNotificationsEntities = append(builderNotificationsEntities, + &entity.BuilderNotificationsEntity{ + BuildId: publishId, + Severity: builderNotifications.Severity, + Message: builderNotifications.Message, + FileId: builderNotifications.FileId, + }) + } + return builderNotificationsEntities +} + +func (a *BuildResultToEntitiesReader) calculateOperationsExternalMetadataMap() map[view.OperationExternalMetadataKey]map[string]interface{} { + result := map[view.OperationExternalMetadataKey]map[string]interface{}{} + if a.PackageInfo.ExternalMetadata == nil { + return result + } + + for _, meta := range a.PackageInfo.ExternalMetadata.Operations { + result[view.OperationExternalMetadataKey{ + ApiType: meta.ApiType, + Method: strings.ToLower(meta.Method), + Path: meta.Path, + }] = meta.ExternalMetadata + } + + return result +} diff --git a/qubership-apihub-service/archive/SourcesArchive.go b/qubership-apihub-service/archive/SourcesArchive.go new file mode 100644 index 0000000..19d860f --- /dev/null +++ b/qubership-apihub-service/archive/SourcesArchive.go @@ -0,0 +1,48 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package archive + +import ( + "archive/zip" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type SourcesArchive struct { + ZipReader *zip.Reader + BuildCfg *view.BuildConfig + + FileHeaders map[string]*zip.File +} + +func NewSourcesArchive(zipReader *zip.Reader, buildCfg *view.BuildConfig) *SourcesArchive { + result := &SourcesArchive{ + ZipReader: zipReader, + BuildCfg: buildCfg, + FileHeaders: map[string]*zip.File{}, + } + result.splitFiles() + return result +} + +func (a *SourcesArchive) splitFiles() { + for _, zipFile := range a.ZipReader.File { + if zipFile.FileInfo().IsDir() { + continue + } + filepath := zipFile.Name + a.FileHeaders[filepath] = zipFile + } +} diff --git a/qubership-apihub-service/archive/ZipUtils.go b/qubership-apihub-service/archive/ZipUtils.go new file mode 100644 index 0000000..7ec5de1 --- /dev/null +++ b/qubership-apihub-service/archive/ZipUtils.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package archive + +import ( + "archive/zip" + "io/ioutil" +) + +func ReadZipFile(zf *zip.File) ([]byte, error) { + f, err := zf.Open() + if err != nil { + return nil, err + } + defer f.Close() + return ioutil.ReadAll(f) +} + +func AddFileToZip(zw *zip.Writer, name string, content []byte) error { + mdFw, err := zw.Create(name) + if err != nil { + return err + } + _, err = mdFw.Write(content) + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/cache/olric.go b/qubership-apihub-service/cache/olric.go new file mode 100644 index 0000000..bd3b4b0 --- /dev/null +++ b/qubership-apihub-service/cache/olric.go @@ -0,0 +1,194 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cache + +import ( + "encoding/gob" + "fmt" + "math/rand" + "net" + "os" + "strconv" + "sync" + "time" + + "github.com/buraksezer/olric" + discovery "github.com/buraksezer/olric-cloud-plugin/lib" + "github.com/buraksezer/olric/config" + log "github.com/sirupsen/logrus" +) + +type OlricProvider interface { + Get() *olric.Olric + GetBindAddr() string +} + +type olricProviderImpl struct { + wg sync.WaitGroup + cfg *config.Config + olricC *olric.Olric +} + +func NewOlricProvider() (OlricProvider, error) { + prov := &olricProviderImpl{wg: sync.WaitGroup{}} + + var err error + gob.Register(map[string]interface{}{}) + prov.cfg, err = getConfig() + if err != nil { + return nil, err + } + + prov.wg.Add(1) + + prov.cfg.Started = prov.startCallback + + prov.olricC, err = olric.New(prov.cfg) + if err != nil { + return nil, err + } + + go func() { + err = prov.olricC.Start() + if err != nil { + log.Panicf("Olric cache node cannot be started. Error: %s", err.Error()) + } + }() + + return prov, nil +} + +func (op *olricProviderImpl) startCallback() { + op.wg.Done() +} + +func (op *olricProviderImpl) Get() *olric.Olric { + op.wg.Wait() + return op.olricC +} + +func (op *olricProviderImpl) GetBindAddr() string { + op.wg.Wait() + return op.cfg.BindAddr +} + +func getConfig() (*config.Config, error) { + mode := getMode() + switch mode { + case "lan": + log.Info("Olric run in cloud mode") + cfg := config.New(mode) + + cfg.LogLevel = "WARN" + cfg.LogVerbosity = 2 + + namespace, err := getNamespace() + if err != nil { + return nil, err + } + + cloudDiscovery := &discovery.CloudDiscovery{} + labelSelector := fmt.Sprintf("name=%s", getServiceName()) + cfg.ServiceDiscovery = map[string]interface{}{ + "plugin": cloudDiscovery, + "provider": "k8s", + "args": fmt.Sprintf("namespace=%s label_selector=\"%s\"", namespace, labelSelector), + } + + // TODO: try to get from replica set via kube client + replicaCount := getReplicaCount() + log.Infof("replicaCount is set to %d", 
replicaCount) + + cfg.PartitionCount = uint64(replicaCount * 4) + cfg.ReplicaCount = replicaCount + + cfg.MemberCountQuorum = int32(replicaCount) + cfg.BootstrapTimeout = 60 * time.Second + cfg.MaxJoinAttempts = 60 + + return cfg, nil + case "local": + log.Info("Olric run in local mode") + cfg := config.New(mode) + + cfg.LogLevel = "WARN" + cfg.LogVerbosity = 2 + + cfg.BindAddr = "localhost" + + cfg.BindPort = getRandomFreePort() + cfg.MemberlistConfig.BindPort = getRandomFreePort() + cfg.PartitionCount = 5 + + return cfg, nil + default: + log.Warnf("Unknown olric discovery mode %s. Will use default \"local\" mode", mode) + return config.New("local"), nil + } +} + +func getRandomFreePort() int { + for { + port := rand.Intn(48127) + 1024 + if isPortFree("localhost", port) { + return port + } + } +} + +func isPortFree(address string, port int) bool { + ln, err := net.Listen("tcp", address+":"+strconv.Itoa(port)) + + if err != nil { + return false + } + + _ = ln.Close() + return true +} + +func getMode() string { + olricCacheMode, exists := os.LookupEnv("OLRIC_DISCOVERY_MODE") + if exists { + return olricCacheMode + } + + return "local" +} + +func getReplicaCount() int { + replicaCountStr, exists := os.LookupEnv("OLRIC_REPLICA_COUNT") + if exists { + rc, err := strconv.Atoi(replicaCountStr) + if err != nil { + log.Errorf("Invalid OLRIC_REPLICA_COUNT env value, expecting int. 
Replica count set to 1.") + return 1 + } + return rc + } + return 1 +} + +func getNamespace() (string, error) { + ns, exists := os.LookupEnv("NAMESPACE") + if !exists { + return "", fmt.Errorf("NAMESPACE env is not set") + } + return ns, nil +} + +func getServiceName() string { + return "apihub-backend" +} diff --git a/qubership-apihub-service/client/Agent.go b/qubership-apihub-service/client/Agent.go new file mode 100644 index 0000000..23cdede --- /dev/null +++ b/qubership-apihub-service/client/Agent.go @@ -0,0 +1,89 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "crypto/tls" + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "gopkg.in/resty.v1" +) + +type AgentClient interface { + GetNamespaces(ctx context.SecurityContext, agentUrl string) (*view.AgentNamespaces, error) + ListServiceNames(ctx context.SecurityContext, agentUrl string, namespace string) (*view.ServiceNamesResponse, error) +} + +func NewAgentClient() AgentClient { + return &agentClientImpl{} +} + +type agentClientImpl struct { +} + +func (a agentClientImpl) ListServiceNames(ctx context.SecurityContext, agentUrl string, namespace string) (*view.ServiceNamesResponse, error) { + req := a.makeRequest(ctx) + + resp, err := req.Get(fmt.Sprintf("%s/api/v1/namespaces/%s/serviceNames", agentUrl, namespace)) + if err != nil { + return nil, fmt.Errorf("failed to list service for namespace %s: %w", namespace, err) + } + if resp.StatusCode() != http.StatusOK { + return nil, fmt.Errorf("failed to list service for namespace %s: response status %v != 200", namespace, resp.StatusCode()) + } + var serviceNames view.ServiceNamesResponse + err = json.Unmarshal(resp.Body(), &serviceNames) + if err != nil { + return nil, err + } + return &serviceNames, nil +} + +func (a agentClientImpl) GetNamespaces(ctx context.SecurityContext, agentUrl string) (*view.AgentNamespaces, error) { + req := a.makeRequest(ctx) + resp, err := req.Get(fmt.Sprintf("%s/api/v1/namespaces", agentUrl)) + if err != nil { + return nil, fmt.Errorf("failed to get namespaces: %w", err) + } + if resp.StatusCode() != http.StatusOK { + return nil, fmt.Errorf("failed to get namespaces: response status %v != 200", resp.StatusCode()) + } + var namespaces view.AgentNamespaces + err = json.Unmarshal(resp.Body(), &namespaces) + if err != nil { + return nil, err + } + return &namespaces, nil +} + +func (a agentClientImpl) 
makeRequest(ctx context.SecurityContext) *resty.Request { + tr := http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + cl := http.Client{Transport: &tr, Timeout: time.Second * 60} + + client := resty.NewWithClient(&cl) + req := client.R() + if ctx.GetUserToken() != "" { + req.SetHeader("Authorization", fmt.Sprintf("Bearer %s", ctx.GetUserToken())) + } + if ctx.GetApiKey() != "" { + req.SetHeader("api-key", ctx.GetApiKey()) + } + return req +} diff --git a/qubership-apihub-service/client/GitClient.go b/qubership-apihub-service/client/GitClient.go new file mode 100644 index 0000000..ca8a992 --- /dev/null +++ b/qubership-apihub-service/client/GitClient.go @@ -0,0 +1,48 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "io" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "golang.org/x/net/context" +) + +type GitClient interface { + GetRepoNameAndUrl(ctx context.Context, projectId string) (string, string, error) + SearchRepositories(ctx context.Context, search string, limit int) ([]view.GitRepository, []view.GitGroup, error) + GetRepoBranches(ctx context.Context, id string, search string, limit int) ([]string, []bool, error) + BranchExists(ctx context.Context, id string, branchName string) (bool, bool, error) + ListDirectory(ctx context.Context, projectId string, branchName string, path string, pagingParams view.PagingParams, existingFiles map[string]bool, existingFolders []string) ([]view.FileNode, error) + ListDirectoryFilesRecursive(ctx context.Context, projectId string, branchName string, path string) ([]string, error) + GetFileContent(ctx context.Context, projectId string, ref string, filePath string) ([]byte, string, string, error) + GetFileContentByBlobId(ctx context.Context, projectId string, blobId string) ([]byte, string, error) + FileExists(ctx context.Context, projectId string, branchName string, filePath string) (bool, error) + GetCommitsList(ctx context.Context, projectId string, branchName string, path string) ([]view.GitCommit, error) + GetFileBlobId(ctx context.Context, projectId string, branchName string, path string) (string, error) + GetBranchLastCommitId(ctx context.Context, projectId string, branchName string) (string, error) + CommitChanges(ctx context.Context, projectId string, branchName string, newBranchName string, message string, changes []Action) error + CloneBranch(ctx context.Context, projectId string, branchName string, newBranchName string) error + CreateMergeRequest(ctx context.Context, projectId string, sourceBranchName string, targetBranchName string, title string, description string) (string, error) + DeleteBranch(ctx context.Context, projectId string, branchName string) error 
+ GetCurrentUserInfo(ctx context.Context, login string) (*view.User, error) + GetRepoTags(ctx context.Context, projectId string, search string, limit int) ([]string, error) + TagExists(ctx context.Context, id string, tag string) (bool, error) + BranchOrTagExists(ctx context.Context, id string, branchName string) (bool, bool, error) + GetTagLastCommitId(ctx context.Context, projectId string, tagName string) (string, error) + GetBranchOrTagLastCommitId(ctx context.Context, projectId string, branchName string) (string, error) + WriteCommitArchive(ctx context.Context, projectId string, commitId string, writer io.Writer, format string) error +} diff --git a/qubership-apihub-service/client/GitClientConfiguration.go b/qubership-apihub-service/client/GitClientConfiguration.go new file mode 100644 index 0000000..65a2940 --- /dev/null +++ b/qubership-apihub-service/client/GitClientConfiguration.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + +type GitClientConfiguration struct { + Integration view.GitIntegrationType + BaseUrl string +} diff --git a/qubership-apihub-service/client/GitlabClient.go b/qubership-apihub-service/client/GitlabClient.go new file mode 100644 index 0000000..264c548 --- /dev/null +++ b/qubership-apihub-service/client/GitlabClient.go @@ -0,0 +1,1608 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "path" + "strconv" + "strings" + "time" + + "golang.org/x/time/rate" + + actx "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" + "github.com/xanzy/go-gitlab" +) + +func NewGitlabOauthClient(gitlabUrl, accessToken string, userId string, tokenRevocationHandler TokenRevocationHandler, tokenExpirationHandler TokenExpirationHandler) (GitClient, error) { + if gitlabUrl == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "gitlabUrl") + } + if accessToken == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "accessToken") + } + client, err := gitlab.NewOAuthClient(accessToken, gitlab.WithBaseURL(gitlabUrl+"/api/v4")) + if err != nil { + return nil, err + } + version, response, err := client.Version.GetVersion() + if err != nil { + if tokenExpired(err) { + accessToken, _, expError := tokenExpirationHandler.TokenExpired(userId, view.GitlabIntegration) + if expError != nil { + return nil, expError + } + client, err = gitlab.NewOAuthClient(accessToken, gitlab.WithBaseURL(gitlabUrl+"/api/v4")) + if err != nil { + return nil, err + } + version, _, err = client.Version.GetVersion() + } + + if tokenIsRevoked(err) { + return nil, tokenRevocationHandler.TokenRevoked(userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + if response.StatusCode == http.StatusUnauthorized { + return nil, tokenRevocationHandler.AuthFailed(userId, view.GitlabIntegration) + } + + return nil, errors.New("Failed to check connection to gitlab, error: " + err.Error()) + } + log.Debugf("Connection to gitlab established, server version: %s %s", version.Version, version.Revision) + return &gitlabClientImpl{ + 
client: client, + userId: userId, + tokenRevocationHandler: tokenRevocationHandler, + tokenExpirationHandler: tokenExpirationHandler, + ctx: context.TODO(), + rateLimiter: rate.NewLimiter(50, 1), // x requests per second + }, nil +} + +type gitlabClientImpl struct { + client *gitlab.Client + userId string + tokenRevocationHandler TokenRevocationHandler + tokenExpirationHandler TokenExpirationHandler + ctx context.Context + rateLimiter *rate.Limiter +} + +const DefaultContextTimeout = time.Second * 20 + +func (c gitlabClientImpl) SearchRepositories(ctx context.Context, search string, limit int) ([]view.GitRepository, []view.GitGroup, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, nil, err + } + + var gitRepoCoordinates *GitRepoCoordinates + + isGitUrl := isGitRepoUrl(search) + if isGitUrl { + gitRepoCoordinates, err = parseGitRepoUrl(search) + } else { + gitRepoCoordinates = &GitRepoCoordinates{ + name: search, + } + } + searchNamespaces := false + var projectGroup string + var projectName string + if strings.Contains(gitRepoCoordinates.name, "/") { + searchNamespaces = true + projectFullPath := strings.Trim(gitRepoCoordinates.name, "/") + if strings.Contains(projectFullPath, "/") { + projectGroup = path.Dir(projectFullPath) + projectName = path.Base(projectFullPath) + } + } + + orderStr := "last_activity_at" + simple := true + options := &gitlab.ListProjectsOptions{ + Search: &gitRepoCoordinates.name, + OrderBy: &orderStr, + Simple: &simple, + SearchNamespaces: &searchNamespaces, + } + options.ListOptions = gitlab.ListOptions{PerPage: limit * 2} + + gitRepositories := make([]view.GitRepository, 0) + gitRepositoryMap := make(map[string]bool) + gitGroups := make([]view.GitGroup, 0) + gitGroupMap := make(map[string]bool) + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + + projects, response, err := c.client.Projects.ListProjects(options, gitlab.WithContext(ctx)) + if err != nil { + if 
contextDeadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, nil, expErr + } + if tokenIsRevoked(err) { + return nil, nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + return nil, nil, err + } + + if isGitUrl { + for _, prj := range projects { + if prj.HTTPURLToRepo == search { + projectIdStr := strconv.Itoa(prj.ID) + gitRepositories = append(gitRepositories, view.GitRepository{RepositoryId: projectIdStr, Name: prj.PathWithNamespace, DefaultBranch: prj.DefaultBranch}) + return gitRepositories, gitGroups, nil + } + } + } else { + for _, prj := range projects { + if strings.Contains(strings.ToLower(prj.PathWithNamespace), strings.ToLower(search)) || + strings.Contains(strings.ToLower(prj.NameWithNamespace), strings.ToLower(search)) { // TODO: maybe use path with namespace + projectIdStr := strconv.Itoa(prj.ID) + gitRepositories = append(gitRepositories, view.GitRepository{RepositoryId: projectIdStr, Name: prj.PathWithNamespace, DefaultBranch: prj.DefaultBranch}) + gitRepositoryMap[projectIdStr] = true + if len(gitRepositories) >= limit { + break + } + } + } + } + + if projectGroup != "" && projectGroup != "/" && projectGroup != "." 
{ + projectsToFill := limit - len(gitRepositories) + projectsFromGroup, err := c.getProjectsFromGroup(ctx, projectName, projectGroup, projectsToFill) + if err != nil { + return nil, nil, err + } + for _, prj := range projectsFromGroup { + if !gitRepositoryMap[prj.RepositoryId] { + gitRepositories = append(gitRepositories, prj) + } + } + } + + groupsToFill := limit - len(gitRepositories) + if groupsToFill > 0 { + groups, err := c.getGroups(ctx, gitRepoCoordinates.name, groupsToFill) + if err != nil { + return nil, nil, err + } + for _, grp := range groups { + gitGroupMap[grp.Name] = true + } + gitGroups = append(gitGroups, groups...) + namespacesToFill := groupsToFill - len(gitGroups) + if namespacesToFill > 0 { + namespaces, err := c.getNamespaces(ctx, gitRepoCoordinates.name, namespacesToFill) + if err != nil { + return nil, nil, err + } + for _, ns := range namespaces { + if !gitGroupMap[ns.Name] { + gitGroups = append(gitGroups, ns) + } + } + } + } + + return gitRepositories, gitGroups, nil +} + +func (c gitlabClientImpl) getProjectsFromGroup(ctx context.Context, search string, group string, limit int) ([]view.GitRepository, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + result := make([]view.GitRepository, 0) + orderStr := "last_activity_at" + simple := true + searchNamespaces := true + minAccessLevel := gitlab.GuestPermissions + options := &gitlab.ListProjectsOptions{ + Search: &group, + OrderBy: &orderStr, + Simple: &simple, + SearchNamespaces: &searchNamespaces, + MinAccessLevel: &minAccessLevel, + } + options.PerPage = 100 + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + + projects, response, err := c.client.Projects.ListProjects(options, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if 
tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, err + } + + for _, prj := range projects { + //if project name\path contains search and project namespace name\path contains group + if (strings.Contains(strings.ToLower(prj.Path), strings.ToLower(search)) || + strings.Contains(strings.ToLower(prj.Name), strings.ToLower(search))) && + (strings.Contains(strings.ToLower(prj.Namespace.FullPath), strings.ToLower(group)) || + strings.Contains(strings.ToLower(prj.Namespace.Name), strings.ToLower(group))) { + result = append(result, view.GitRepository{RepositoryId: strconv.Itoa(prj.ID), Name: prj.PathWithNamespace, DefaultBranch: prj.DefaultBranch}) + if len(result) >= limit { + break + } + } + } + return result, nil +} + +func (c gitlabClientImpl) getGroups(ctx context.Context, search string, limit int) ([]view.GitGroup, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + result := make([]view.GitGroup, 0) + allAvailable := true + groupOptions := &gitlab.ListGroupsOptions{ + Search: &search, + AllAvailable: &allAvailable, + } + groupOptions.ListOptions = gitlab.ListOptions{PerPage: limit} + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + groups, response, err := c.client.Groups.ListGroups(groupOptions, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, err + } + for _, grp := range groups { + result = append(result, view.GitGroup{Name: grp.FullPath}) + if len(result) >= 
limit { + break + } + } + return result, nil +} + +func (c gitlabClientImpl) getNamespaces(ctx context.Context, search string, limit int) ([]view.GitGroup, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + result := make([]view.GitGroup, 0) + namespaceOptions := &gitlab.ListNamespacesOptions{ + Search: &search, + } + namespaceOptions.ListOptions = gitlab.ListOptions{PerPage: limit} + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + namespaces, response, err := c.client.Namespaces.ListNamespaces(namespaceOptions, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, err + } + for _, ns := range namespaces { + result = append(result, view.GitGroup{Name: ns.FullPath}) + if len(result) >= limit { + break + } + } + return result, nil +} + +func (c gitlabClientImpl) GetFileContent(ctx context.Context, projectId string, ref string, filePath string) ([]byte, string, string, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, "", "", err + } + + if projectId == "" { + return nil, "", "", fmt.Errorf("parameter %s can't be blank", "projectId") + } + if filePath == "" { + return nil, "", "", fmt.Errorf("parameter %s can't be blank", "filePath") + } + + var mdOptions gitlab.GetFileMetaDataOptions + if ref != "" { + mdOptions.Ref = &ref // Ref is the name of branch, tag or commit + } + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + ctx = actx.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetFileContent(%s,%s,%s))", projectId, ref, filePath)) + trackGitlabCall(ctx) + defer cancel() + 
metadata, response, err := c.client.RepositoryFiles.GetFileMetaData(projectId, filePath, &mdOptions, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusNotFound { + return nil, "", "", nil + } + if err != nil { + if contextDeadlineExceeded(err) { + return nil, "", "", GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, "", "", expErr + } + if tokenIsRevoked(err) { + return nil, "", "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, "", "", GitlabDeadlineExceeded(err) + } + return nil, "", "", err + } + + var options gitlab.GetRawFileOptions + if ref != "" { + options.Ref = &ref // Ref is the name of branch, tag or commit + } + ctx, cancel = context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + content, response, err := c.client.RepositoryFiles.GetRawFile(projectId, filePath, &options, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, "", "", GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, "", "", expErr + } + if tokenIsRevoked(err) { + return nil, "", "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, "", "", GitlabDeadlineExceeded(err) + } + return nil, "", "", err + } + + contentType := response.Header.Get("Content-Type") + + return content, contentType, metadata.BlobID, nil +} + +func (c gitlabClientImpl) GetFileContentByBlobId(ctx context.Context, projectId string, blobId string) ([]byte, string, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, "", err + } + + if projectId == "" { + return nil, "", fmt.Errorf("parameter %s can't be blank", "projectId") + } + if blobId == "" { + return nil, "", fmt.Errorf("parameter %s can't be blank", "blobId") + } 
+ + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + ctx = actx.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetFileContentByBlobId(%s,%s))", projectId, blobId)) + trackGitlabCall(ctx) + defer cancel() + fileData, response, err := c.client.Repositories.RawBlobContent(projectId, blobId, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusNotFound { + return nil, "", nil + } + if err != nil { + if contextDeadlineExceeded(err) { + return nil, "", GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, "", expErr + } + if tokenIsRevoked(err) { + return nil, "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, "", GitlabDeadlineExceeded(err) + } + return nil, "", err + } + contentType := response.Header.Get("Content-Type") + + return fileData, contentType, nil +} + +func (c gitlabClientImpl) FileExists(ctx context.Context, projectId string, branchName string, filePath string) (bool, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return false, err + } + + var options gitlab.GetFileMetaDataOptions + if branchName != "" { + options.Ref = &branchName + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + ctx = actx.CreateContextWithStacktrace(ctx, fmt.Sprintf("FileExists(%s,%s,%s))", projectId, branchName, filePath)) + trackGitlabCall(ctx) + defer cancel() + _, response, err := c.client.RepositoryFiles.GetFileMetaData(projectId, filePath, &options, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusNotFound { + return false, nil + } + if err != nil { + if contextDeadlineExceeded(err) { + return false, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return false, expErr + } + if tokenIsRevoked(err) { + return false, 
c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return false, GitlabDeadlineExceeded(err) + } + return false, err + } + + if response != nil && response.StatusCode == http.StatusOK { + return true, nil + } + + return false, nil +} + +func (c gitlabClientImpl) ListDirectory(ctx context.Context, projectId string, branchName string, path string, pagingParams view.PagingParams, + existingFiles map[string]bool, existingFolders []string) ([]view.FileNode, error) { + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + if projectId == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "projectId") + } + var options gitlab.ListTreeOptions + if branchName != "" { + options.Ref = &branchName + } + path = strings.TrimPrefix(path, "/") + if path != "" { + options.Path = &path + } + + itemsLeft := pagingParams.ItemsPerPage + viewNodes := make([]view.FileNode, 0) + startOffset := pagingParams.ItemsPerPage * (pagingParams.Page - 1) + offset := 0 + + options.ListOptions = gitlab.ListOptions{Page: pagingParams.Page, PerPage: pagingParams.ItemsPerPage} + + for itemsLeft > 0 { + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + nodes, response, err := c.client.Repositories.ListTree(projectId, &options, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + if branchNotFound(err) { + return nil, GitlabBranchNotFound(projectId, branchName) + } + return nil, err + } + if (response.TotalItems - startOffset - offset) < itemsLeft { + itemsLeft = 0 + } + for _, node := range nodes { + offset++ + 
fileId := node.Path + + if node.Type == "tree" { + exists := false + for _, existingFolder := range existingFolders { + if strings.HasPrefix(fileId, existingFolder) || + fileId == strings.TrimSuffix(existingFolder, "/") { + exists = true + break + } + } + if exists { + continue + } + } + _, exists := existingFiles[fileId] + if exists { + continue + } + + viewNodes = append(viewNodes, *toViewNode(node)) + itemsLeft-- + } + options.ListOptions.Page = options.ListOptions.Page + 1 + + } + + return viewNodes, nil +} + +func (c gitlabClientImpl) ListDirectoryFilesRecursive(ctx context.Context, projectId string, branchName string, path string) ([]string, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + if projectId == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "projectId") + } + var options gitlab.ListTreeOptions + if branchName != "" { + options.Ref = &branchName + } + path = strings.TrimPrefix(path, "/") + if path != "" { + options.Path = &path + } + truePtr := true + options.Recursive = &truePtr + + files := make([]string, 0) + + options.ListOptions = gitlab.ListOptions{Page: 1, PerPage: 100} + for options.ListOptions.Page != 0 { + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + nodes, response, err := c.client.Repositories.ListTree(projectId, &options, gitlab.WithContext(ctx)) + if err != nil { //todo check that 404 returns an error + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + if branchNotFound(err) { + return nil, GitlabBranchNotFound(projectId, branchName) + } + return nil, err + } + for _, node := range nodes { + if node.Type == "tree" { + 
continue + } + fileId := node.Path + files = append(files, fileId) + } + options.ListOptions.Page = response.NextPage + } + return files, nil +} + +func (c gitlabClientImpl) GetRepoBranches(ctx context.Context, projectId string, search string, limit int) ([]string, []bool, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, nil, err + } + + if projectId == "" { + return nil, nil, fmt.Errorf("parameter %s can't be blank", "projectId") + } + var names []string + var canPush []bool + + var opts gitlab.ListBranchesOptions + if search != "" { + opts = gitlab.ListBranchesOptions{Search: &search} + } + + var maxPageCount int + if limit == -1 || limit == 100 { + opts.PerPage = 100 + maxPageCount = -1 + } else if limit > 100 { + opts.PerPage = 100 + maxPageCount = limit / 100 + } else { + opts.PerPage = limit + maxPageCount = 1 + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + branches, response, err := c.client.Branches.ListBranches(projectId, &opts, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, nil, expErr + } + if tokenIsRevoked(err) { + return nil, nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + if response != nil && response.StatusCode == http.StatusNotFound { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.RepositoryIdNotFound, + Message: exception.RepositoryIdNotFoundMsg, + Params: map[string]interface{}{"repositoryId": projectId}, + Debug: err.Error(), + } + } + return nil, nil, err + } + if branches == nil || len(branches) == 0 { + log.Debugf("No branches found for project with id %v! 
search='%s'", projectId, search) + return nil, nil, nil + } + if response.TotalPages > 1 && maxPageCount != 1 { + if maxPageCount == -1 { + maxPageCount = response.TotalPages + } + for i := 2; i < maxPageCount; i++ { + opts.Page = i + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + branchesFromPage, listResponse, err := c.client.Branches.ListBranches(projectId, &opts, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(listResponse.StatusCode, err) + if expErr != nil { + return nil, nil, expErr + } + if tokenIsRevoked(err) { + return nil, nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, nil, GitlabDeadlineExceeded(err) + } + return nil, nil, err + } + branches = append(branches, branchesFromPage...) + } + } + for _, branch := range branches { + names = append(names, branch.Name) + canPush = append(canPush, branch.CanPush) + } + + return names, canPush, nil +} + +func (c gitlabClientImpl) BranchExists(ctx context.Context, projectId string, branchName string) (bool, bool, error) { + if projectId == "" { + return false, false, fmt.Errorf("parameter %s can't be blank", "projectId") + } + if branchName == "" { + return false, false, fmt.Errorf("parameter %s can't be blank", "branchName") + } + err := c.rateLimiter.Wait(ctx) + if err != nil { + return false, false, err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + branch, response, err := c.client.Branches.GetBranch(projectId, branchName, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return false, false, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return false, false, expErr + } + if response != nil && response.StatusCode == http.StatusNotFound { 
+ return false, false, nil + } + if tokenIsRevoked(err) { + return false, false, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return false, false, GitlabDeadlineExceeded(err) + } + return false, false, err + } + if branch == nil || response.StatusCode != http.StatusOK { + return false, false, err + } + return true, branch.CanPush, nil +} + +func (c gitlabClientImpl) GetRepoNameAndUrl(ctx context.Context, gitRepoId string) (string, string, error) { + if gitRepoId == "" { + return "", "", fmt.Errorf("parameter %s can't be blank", "gitRepoId") + } + err := c.rateLimiter.Wait(ctx) + if err != nil { + return "", "", err + } + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + project, response, err := c.client.Projects.GetProject(gitRepoId, &gitlab.GetProjectOptions{}, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return "", "", GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return "", "", expErr + } + if tokenIsRevoked(err) { + return "", "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return "", "", GitlabDeadlineExceeded(err) + } + return "", "", err + } + if response != nil && response.StatusCode == http.StatusNotFound { + return "", "", fmt.Errorf("requested git repository with id '%s' not found", gitRepoId) + } + return project.PathWithNamespace, project.HTTPURLToRepo, nil +} + +func (c gitlabClientImpl) GetCommitsList(ctx context.Context, projectId string, branchName string, path string) ([]view.GitCommit, error) { + if projectId == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "projectId") + } + if branchName == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "branchName") + } + if path == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "path") + } + + err := 
c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + commits, response, err := c.client.Commits.ListCommits(projectId, &gitlab.ListCommitsOptions{ + RefName: &branchName, + Path: &path}, gitlab.WithContext(ctx)) + + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, fmt.Errorf("failed to get commits list %v", err) + } + + if response.TotalPages > 1 { + for i := 2; i <= response.TotalPages; i++ { + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + commitsFromPage, listResponse, err := c.client.Commits.ListCommits(projectId, &gitlab.ListCommitsOptions{ + ListOptions: gitlab.ListOptions{Page: i}, + RefName: &branchName, + Path: &path}, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(listResponse.StatusCode, err) + if expErr != nil { + return nil, expErr + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, err + } + commits = append(commits, commitsFromPage...) 
+ } + } + + gitCommits := []view.GitCommit{} + for _, commit := range commits { + gitCommits = append(gitCommits, + view.GitCommit{Id: commit.ID, + CommitterName: commit.CommitterName, + CommitterEmail: commit.CommitterEmail, + CommittedDate: *commit.CommittedDate, + Message: commit.Message}) + } + return gitCommits, nil +} + +func (c gitlabClientImpl) GetFileBlobId(ctx context.Context, projectId string, branchName string, path string) (string, error) { + if projectId == "" { + return "", fmt.Errorf("parameter %s can't be blank", "projectId") + } + if path == "" { + return "", fmt.Errorf("parameter %s can't be blank", "filePath") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return "", err + } + + var mdOptions gitlab.GetFileMetaDataOptions + if branchName != "" { + mdOptions.Ref = &branchName + } + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + ctx = actx.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetFileBlobId(%s,%s,%s))", projectId, branchName, path)) + trackGitlabCall(ctx) + defer cancel() + metadata, response, err := c.client.RepositoryFiles.GetFileMetaData(projectId, path, &mdOptions, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusNotFound { + return "", nil + } + if err != nil { + if contextDeadlineExceeded(err) { + return "", GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return "", expErr + } + if tokenIsRevoked(err) { + return "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return "", GitlabDeadlineExceeded(err) + } + return "", err + } + + return metadata.BlobID, nil +} + +func (c gitlabClientImpl) GetBranchLastCommitId(ctx context.Context, projectId string, branchName string) (string, error) { + if projectId == "" { + return "", fmt.Errorf("parameter %s can't be blank", "projectId") + } + if branchName == "" { + return "", fmt.Errorf("parameter %s 
can't be blank", "branchName") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return "", err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + branch, response, err := c.client.Branches.GetBranch(projectId, branchName, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return "", GitlabDeadlineExceeded(err) + } + if response != nil && response.StatusCode == http.StatusNotFound { + return "", nil + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return "", expErr + } + if tokenIsRevoked(err) { + return "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + return "", err + } + if branch == nil || response.StatusCode != http.StatusOK || branch.Commit == nil { + return "", err + } + + return branch.Commit.ID, nil +} + +func (c gitlabClientImpl) CommitChanges(ctx context.Context, projectId string, branchName string, newBranchName string, message string, actions []Action) error { + if projectId == "" { + return fmt.Errorf("parameter %s can't be blank", "projectId") + } + if branchName == "" { + return fmt.Errorf("parameter %s can't be blank", "branchName") + } + if message == "" { + return fmt.Errorf("parameter %s can't be blank", "message") + } + if len(actions) == 0 { + return fmt.Errorf("parameter %s can't be blank", "actions") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return err + } + + gitlabActions := toGitlabActions(actions) + + opt := gitlab.CreateCommitOptions{ + StartBranch: &branchName, + Branch: &newBranchName, + CommitMessage: &message, + Actions: gitlabActions, + } + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + _, response, err := c.client.Commits.CreateCommit(projectId, &opt, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusForbidden { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: 
exception.InsufficientRightsToCommit,
			Message: exception.InsufficientRightsToCommitMsg,
			Params:  map[string]interface{}{"branch": branchName},
			Debug:   err.Error(),
		}
	}
	if err != nil {
		if contextDeadlineExceeded(err) {
			return GitlabDeadlineExceeded(err)
		}
		// response is nil on transport-level errors; guard before reading
		// StatusCode (the Forbidden check above already guards this way).
		if response != nil {
			expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err)
			if expErr != nil {
				return expErr
			}
		}
		if tokenIsRevoked(err) {
			return c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration)
		}
		if deadlineExceeded(err) {
			return GitlabDeadlineExceeded(err)
		}
		// GitLab push rules may reject commits whose message lacks a ticket reference.
		if strings.Contains(err.Error(), "no tickets in message") {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.NoTicketInCommit,
				Message: exception.NoTicketInCommitMsg,
				Debug:   err.Error(),
			}
		}

		return fmt.Errorf("failed to commit changes %+v", err.Error())
	}
	return nil
}

// CloneBranch creates branch newBranchName in the given project from branchName.
func (c gitlabClientImpl) CloneBranch(ctx context.Context, projectId string, branchName string, newBranchName string) error {
	if projectId == "" {
		return fmt.Errorf("parameter %s can't be blank", "projectId")
	}
	if branchName == "" {
		return fmt.Errorf("parameter %s can't be blank", "branchName")
	}
	if newBranchName == "" {
		return fmt.Errorf("parameter %s can't be blank", "newBranchName")
	}

	err := c.rateLimiter.Wait(ctx)
	if err != nil {
		return err
	}

	opt := gitlab.CreateBranchOptions{
		Branch: &newBranchName,
		Ref:    &branchName,
	}
	ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout)
	defer cancel()
	_, response, err := c.client.Branches.CreateBranch(projectId, &opt, gitlab.WithContext(ctx))
	if response != nil && response.StatusCode == http.StatusForbidden {
		return fmt.Errorf("failed to clone branch, not enough rights")
	}
	if err != nil {
		if contextDeadlineExceeded(err) {
			return GitlabDeadlineExceeded(err)
		}
		// response is nil on transport-level errors; guard before dereferencing.
		if response != nil {
			expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err)
			if expErr != nil {
				return expErr
			}
		}
		if tokenIsRevoked(err) {
return c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration)
		}
		if deadlineExceeded(err) {
			return GitlabDeadlineExceeded(err)
		}
		return fmt.Errorf("failed to clone branch %+v", err.Error())
	}
	return nil
}

// CreateMergeRequest opens a merge request from sourceBranchName into
// targetBranchName (removing the source branch on merge) and returns the
// web URL of the created merge request.
func (c gitlabClientImpl) CreateMergeRequest(ctx context.Context, projectId string, sourceBranchName string, targetBranchName string, title string, description string) (string, error) {
	if projectId == "" {
		return "", fmt.Errorf("parameter %s can't be blank", "projectId")
	}
	if sourceBranchName == "" {
		return "", fmt.Errorf("parameter %s can't be blank", "sourceBranchName")
	}
	if targetBranchName == "" {
		return "", fmt.Errorf("parameter %s can't be blank", "targetBranchName")
	}
	if title == "" {
		return "", fmt.Errorf("parameter %s can't be blank", "title")
	}

	err := c.rateLimiter.Wait(ctx)
	if err != nil {
		return "", err
	}

	removeSourceBranch := true
	opt := gitlab.CreateMergeRequestOptions{
		Title:              &title,
		Description:        &description,
		TargetBranch:       &targetBranchName,
		SourceBranch:       &sourceBranchName,
		RemoveSourceBranch: &removeSourceBranch,
	}
	ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout)
	defer cancel()
	mergeRequest, response, err := c.client.MergeRequests.CreateMergeRequest(projectId, &opt, gitlab.WithContext(ctx))
	// Guard against a nil response (transport-level error) before reading
	// StatusCode — the unguarded access here was the only one of its kind;
	// every sibling method checks response != nil first.
	if response != nil && response.StatusCode == http.StatusForbidden {
		return "", fmt.Errorf("failed to create merge request for project %v, not enough rights", projectId)
	}
	if err != nil {
		if contextDeadlineExceeded(err) {
			return "", GitlabDeadlineExceeded(err)
		}
		if response != nil {
			expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err)
			if expErr != nil {
				return "", expErr
			}
		}
		if tokenIsRevoked(err) {
			return "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration)
		}
		if deadlineExceeded(err) {
			return "", GitlabDeadlineExceeded(err)
		}
		return "", fmt.Errorf("failed to merge request branch %+v", err.Error())
	}
	return
mergeRequest.WebURL, nil +} + +func (c gitlabClientImpl) DeleteBranch(ctx context.Context, projectId string, branchName string) error { + if projectId == "" { + return fmt.Errorf("parameter %s can't be blank", "projectId") + } + if branchName == "" { + return fmt.Errorf("parameter %s can't be blank", "branchName") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + response, err := c.client.Branches.DeleteBranch(projectId, branchName, gitlab.WithContext(ctx)) + if response != nil && response.StatusCode == http.StatusForbidden { + return fmt.Errorf("failed to delete branch, not enough rights") + } + if err != nil { + if contextDeadlineExceeded(err) { + return GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return expErr + } + if tokenIsRevoked(err) { + return c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return GitlabDeadlineExceeded(err) + } + return fmt.Errorf("failed to delete branch %+v", err.Error()) + } + return nil +} + +func (c gitlabClientImpl) GetRepoTags(ctx context.Context, projectId string, search string, limit int) ([]string, error) { + if projectId == "" { + return nil, fmt.Errorf("parameter %s can't be blank", "projectId") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + var names []string + + var orderBy, sort = "name", "asc" + var opts gitlab.ListTagsOptions + if search != "" { + opts = gitlab.ListTagsOptions{Search: &search} + } + opts.OrderBy = &orderBy + opts.Sort = &sort + + var maxPageCount int + if limit == -1 || limit == 100 { + opts.PerPage = 100 + maxPageCount = -1 + } else if limit > 100 { + opts.PerPage = 100 + maxPageCount = limit / 100 + } else { + opts.PerPage = limit + maxPageCount = 1 + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer 
cancel()
	tags, response, err := c.client.Tags.ListTags(projectId, &opts, gitlab.WithContext(ctx))
	if err != nil {
		if contextDeadlineExceeded(err) {
			return nil, GitlabDeadlineExceeded(err)
		}
		// response is nil on transport-level errors; guard before dereferencing.
		if response != nil {
			expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err)
			if expErr != nil {
				return nil, expErr
			}
		}
		if tokenIsRevoked(err) {
			return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration)
		}
		if deadlineExceeded(err) {
			return nil, GitlabDeadlineExceeded(err)
		}
		if response != nil && response.StatusCode == http.StatusNotFound {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.RepositoryIdNotFound,
				Message: exception.RepositoryIdNotFoundMsg,
				Params:  map[string]interface{}{"repositoryId": projectId},
				Debug:   err.Error(),
			}
		}
		return nil, err
	}
	if len(tags) == 0 {
		log.Debugf("No tags found for project with id %v! search='%s'", projectId, search)
		return nil, nil
	}
	if response.TotalPages > 1 && maxPageCount != 1 {
		// -1 means "no limit": fetch every available page. Also clamp to the
		// actual page count so we never issue requests for empty pages.
		if maxPageCount == -1 || maxPageCount > response.TotalPages {
			maxPageCount = response.TotalPages
		}
		// Start at page 2: page 1 was already fetched above. The previous
		// loop (i := 1; i < maxPageCount) re-fetched page 1 — duplicating its
		// tags in the result — and never fetched the last page.
		for i := 2; i <= maxPageCount; i++ {
			opts.Page = i
			pageCtx, pageCancel := context.WithTimeout(ctx, DefaultContextTimeout)
			tagsFromPage, listResponse, err := c.client.Tags.ListTags(projectId, &opts, gitlab.WithContext(pageCtx))
			// Cancel immediately: a defer inside the loop would accumulate
			// one timer per page until the function returns.
			pageCancel()
			if err != nil {
				if contextDeadlineExceeded(err) {
					return nil, GitlabDeadlineExceeded(err)
				}
				if listResponse != nil {
					expErr := c.tokenUnexpectedlyExpired(listResponse.StatusCode, err)
					if expErr != nil {
						return nil, expErr
					}
				}
				if tokenIsRevoked(err) {
					return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration)
				}
				if deadlineExceeded(err) {
					return nil, GitlabDeadlineExceeded(err)
				}
				return nil, err
			}
			tags = append(tags, tagsFromPage...)
+ } + } + for _, tag := range tags { + names = append(names, tag.Name) + } + + return names, nil +} + +func (c gitlabClientImpl) TagExists(ctx context.Context, id string, tag string) (bool, error) { + if id == "" { + return false, fmt.Errorf("parameter %s can't be blank", "projectId") + } + if tag == "" { + return false, fmt.Errorf("parameter %s can't be blank", "tag") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return false, err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + branch, response, err := c.client.Tags.GetTag(id, tag, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return false, GitlabDeadlineExceeded(err) + } + expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return false, expErr + } + if response != nil && response.StatusCode == http.StatusNotFound { + return false, nil + } + if tokenIsRevoked(err) { + return false, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return false, GitlabDeadlineExceeded(err) + } + return false, err + } + if branch == nil || response.StatusCode != http.StatusOK { + return false, err + } + return true, nil +} + +func (c gitlabClientImpl) GetTagLastCommitId(ctx context.Context, projectId string, tagName string) (string, error) { + if projectId == "" { + return "", fmt.Errorf("parameter %s can't be blank", "projectId") + } + if tagName == "" { + return "", fmt.Errorf("parameter %s can't be blank", "tagName") + } + + err := c.rateLimiter.Wait(ctx) + if err != nil { + return "", err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + tag, response, err := c.client.Tags.GetTag(projectId, tagName, gitlab.WithContext(ctx)) + if err != nil { + if contextDeadlineExceeded(err) { + return "", GitlabDeadlineExceeded(err) + } + if response != nil && response.StatusCode == http.StatusNotFound { + return "", nil + } + 
expErr := c.tokenUnexpectedlyExpired(response.StatusCode, err) + if expErr != nil { + return "", expErr + } + if tokenIsRevoked(err) { + return "", c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + return "", err + } + if tag == nil || response.StatusCode != http.StatusOK || tag.Commit == nil { + return "", err + } + + return tag.Commit.ID, nil +} + +func (c gitlabClientImpl) GetBranchOrTagLastCommitId(ctx context.Context, projectId string, branchName string) (string, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return "", err + } + + lastCommit, err := c.GetBranchLastCommitId(ctx, projectId, branchName) + if err != nil { + return "", err + } + if lastCommit == "" { + return c.GetTagLastCommitId(ctx, projectId, branchName) + } + return lastCommit, nil +} + +func (c gitlabClientImpl) BranchOrTagExists(ctx context.Context, id string, branchName string) (bool, bool, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return false, false, err + } + + exists, canPush, err := c.BranchExists(ctx, id, branchName) + if err != nil { + return false, false, err + } + if !exists { + exists, err = c.TagExists(ctx, id, branchName) + return exists, false, err + } + return exists, canPush, nil +} + +func (c gitlabClientImpl) GetCurrentUserInfo(ctx context.Context, login string) (*view.User, error) { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return nil, err + } + + ctx, cancel := context.WithTimeout(ctx, DefaultContextTimeout) + defer cancel() + usr, _, err := c.client.Users.CurrentUser(gitlab.WithContext(ctx)) + + if err != nil { + if contextDeadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + if tokenIsRevoked(err) { + return nil, c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return nil, GitlabDeadlineExceeded(err) + } + return nil, err + } + + if usr == nil { + return nil, fmt.Errorf("current user is NULL") + } + + return &view.User{Id: 
login, Name: usr.Name, AvatarUrl: usr.AvatarURL, Email: usr.Email}, nil +} + +func (c gitlabClientImpl) WriteCommitArchive(ctx context.Context, projectId string, commitId string, writer io.Writer, format string) error { + err := c.rateLimiter.Wait(ctx) + if err != nil { + return err + } + + ctx, cancel := context.WithTimeout(ctx, time.Second*60) + defer cancel() + + _, err = c.client.Repositories.StreamArchive( + projectId, + writer, + &gitlab.ArchiveOptions{ + SHA: &commitId, + Format: &format, + }, + gitlab.WithContext(ctx), + ) + if err != nil { + if contextDeadlineExceeded(err) { + return GitlabDeadlineExceeded(err) + } + if tokenIsRevoked(err) { + return c.tokenRevocationHandler.TokenRevoked(c.userId, view.GitlabIntegration) + } + if deadlineExceeded(err) { + return GitlabDeadlineExceeded(err) + } + return err + } + + return nil +} + +type GitRepoCoordinates struct { + host string + groupsStr string + groups []string + name string +} + +func parseGitRepoUrl(url string) (*GitRepoCoordinates, error) { + if !(strings.HasPrefix(url, "https://") && strings.HasSuffix(url, ".git")) { + return nil, fmt.Errorf("incorrect https git repo URL provided. Expecting format https://git.domain.com/abc/def.git") + } + + url = strings.TrimPrefix(url, "https://") + url = strings.TrimSuffix(url, ".git") + + parts := strings.Split(url, "/") + if len(parts) < 2 { + return nil, fmt.Errorf("incorrect https git repo URL provided. 
Cannot detect repo name") + } + + result := GitRepoCoordinates{groups: []string{}} + result.host = "https://" + parts[0] + count := len(parts) + for i := 1; i < count; i++ { + if i == (count - 1) { + result.name = parts[i] + break + } + result.groups = append(result.groups, parts[i]) + } + result.groupsStr = strings.Join(result.groups, "/") + + return &result, nil +} + +func isGitRepoUrl(str string) bool { + url, err := url.ParseRequestURI(str) + if err != nil { + return false + } + return strings.HasSuffix(url.Path, ".git") +} + +func toViewNode(gitlabNode *gitlab.TreeNode) *view.FileNode { + isFolder := false + if gitlabNode.Type == "tree" { + isFolder = true + } + + return &view.FileNode{Name: gitlabNode.Name, IsFolder: isFolder} +} + +var base64Encoding = "base64" + +func toGitlabActions(actions []Action) []*gitlab.CommitActionOptions { + result := []*gitlab.CommitActionOptions{} + for _, action := range actions { + tmpAction := action + var gitlabAction *gitlab.FileActionValue + switch action.Type { + case ActionTypeCreate: + gitlabAction = gitlab.FileAction(gitlab.FileCreate) + case ActionTypeDelete: + gitlabAction = gitlab.FileAction(gitlab.FileDelete) + case ActionTypeMove: + gitlabAction = gitlab.FileAction(gitlab.FileMove) + case ActionTypeUpdate: + gitlabAction = gitlab.FileAction(gitlab.FileUpdate) + } + + if action.isBase64Encoded { + result = append(result, &gitlab.CommitActionOptions{ + Action: gitlabAction, + FilePath: &tmpAction.FilePath, + PreviousPath: &tmpAction.PreviousPath, + Content: &tmpAction.Content, + Encoding: &base64Encoding, + }) + } else { + result = append(result, &gitlab.CommitActionOptions{ + Action: gitlabAction, + FilePath: &tmpAction.FilePath, + PreviousPath: &tmpAction.PreviousPath, + Content: &tmpAction.Content, + }) + } + + } + return result +} + +func tokenExpired(err error) bool { + if err == nil { + return false + } + return strings.Contains(err.Error(), "Token is expired") +} + +func tokenIsRevoked(err error) bool { + if 
strings.Contains(err.Error(), "Token was revoked") { + return true + } + return false +} + +func contextDeadlineExceeded(err error) bool { + return errors.Is(err, context.DeadlineExceeded) +} + +func deadlineExceeded(err error) bool { + return strings.Contains(err.Error(), "Deadline Exceeded") +} + +func branchNotFound(err error) bool { + return strings.Contains(err.Error(), "Tree Not Found") +} + +func (c gitlabClientImpl) tokenUnexpectedlyExpired(responseCode int, err error) error { + if responseCode == http.StatusUnauthorized && tokenExpired(err) { + _, _, expError := c.tokenExpirationHandler.TokenExpired(c.userId, view.GitlabIntegration) + if expError != nil { + return fmt.Errorf("failed to refresh gitlab token: %v", expError) + } + return &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.IntegrationTokenUnexpectedlyExpired, + Message: exception.IntegrationTokenUnexpectedlyExpiredMsg, + } + } + return nil +} + +func trackGitlabCall(ctx context.Context) { + stacktrace := actx.GetStacktraceFromContext(ctx) + if stacktrace == nil { + return + } + // ok we have the stacktrace now + log.Debugf("Gitlab call stacktrace: %+v", stacktrace) // initial impl +} diff --git a/qubership-apihub-service/client/GitlabErrorHandler.go b/qubership-apihub-service/client/GitlabErrorHandler.go new file mode 100644 index 0000000..2eb75be --- /dev/null +++ b/qubership-apihub-service/client/GitlabErrorHandler.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + log "github.com/sirupsen/logrus" +) + +func GitlabDeadlineExceeded(err error) error { + log.Errorf("Gitlab is currently unavailable. Please try again later") + return &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.GitlabDeadlineExceeded, + Message: exception.GitlabDeadlineExceededMsg, + Debug: err.Error(), + } +} + +func GitlabBranchNotFound(projectId string, branchName string) error { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"branch": branchName, "projectId": projectId}, + } +} diff --git a/qubership-apihub-service/client/TokenExpirationHandler.go b/qubership-apihub-service/client/TokenExpirationHandler.go new file mode 100644 index 0000000..2c92a39 --- /dev/null +++ b/qubership-apihub-service/client/TokenExpirationHandler.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "net/http" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type TokenExpirationHandler interface { + TokenExpired(userId string, integrationType view.GitIntegrationType) (string, *time.Time, error) +} + +type TokenExpirationHandlerStub struct { +} + +func (t TokenExpirationHandlerStub) TokenExpired(userId string, integrationType view.GitIntegrationType) (string, *time.Time, error) { + log.Errorf("Token expired! userId: %s, integrationType: %s", userId, integrationType) + return "", nil, &exception.CustomError{ + Status: http.StatusNotExtended, + Code: exception.IntegrationTokenExpired, + Message: exception.IntegrationTokenExpiredMsg, + Params: map[string]interface{}{"integration": integrationType}, + } +} diff --git a/qubership-apihub-service/client/TokenRevocationHandler.go b/qubership-apihub-service/client/TokenRevocationHandler.go new file mode 100644 index 0000000..3341b43 --- /dev/null +++ b/qubership-apihub-service/client/TokenRevocationHandler.go @@ -0,0 +1,46 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type TokenRevocationHandler interface { + TokenRevoked(userId string, integrationType view.GitIntegrationType) error + AuthFailed(userId string, integrationType view.GitIntegrationType) error +} + +type TokenRevocationHandlerStub struct { +} + +func (t TokenRevocationHandlerStub) TokenRevoked(userId string, integrationType view.GitIntegrationType) error { + log.Errorf("Token was unexpectedly revoked! userId: %s, integrationType: %s", userId, integrationType) + return &exception.CustomError{ + Status: http.StatusNotExtended, + Code: exception.IntegrationTokenRevoked, + Message: exception.IntegrationTokenRevokedMsg, + Params: map[string]interface{}{"integration": integrationType}, + } +} + +func (t TokenRevocationHandlerStub) AuthFailed(userId string, integrationType view.GitIntegrationType) error { + log.Warnf("Git auth failed for user %s, integrationType: %s", userId, integrationType) + return nil +} diff --git a/qubership-apihub-service/client/dto.go b/qubership-apihub-service/client/dto.go new file mode 100644 index 0000000..e2c2315 --- /dev/null +++ b/qubership-apihub-service/client/dto.go @@ -0,0 +1,162 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "encoding/base64" + "path" + "strings" +) + +const ActionTypeCreate = "create" +const ActionTypeDelete = "delete" +const ActionTypeMove = "move" +const ActionTypeUpdate = "update" + +type Action struct { + Type string + FilePath string + PreviousPath string + Content string + isBase64Encoded bool +} + +func NewActionBuilder() *ActionBuilder { + return &ActionBuilder{actions: []Action{}} +} + +type ActionBuilder struct { + actions []Action +} + +func (b *ActionBuilder) Create(path string, content []byte) *ActionBuilder { + if isKnownTextFormatExtension(path) { + b.actions = append(b.actions, Action{ + Type: ActionTypeCreate, + FilePath: path, + Content: string(content), + }) + } else { + base64Res := make([]byte, base64.StdEncoding.EncodedLen(len(content))) + base64.StdEncoding.Encode(base64Res, content) + b.actions = append(b.actions, Action{ + Type: ActionTypeCreate, + FilePath: path, + Content: string(base64Res), + isBase64Encoded: true, + }) + } + + return b +} + +func (b *ActionBuilder) Update(path string, content []byte) *ActionBuilder { + if isKnownTextFormatExtension(path) { + b.actions = append(b.actions, Action{ + Type: ActionTypeUpdate, + FilePath: path, + Content: string(content), + }) + } else { + + base64Res := make([]byte, base64.StdEncoding.EncodedLen(len(content))) + base64.StdEncoding.Encode(base64Res, content) + b.actions = append(b.actions, Action{ + Type: ActionTypeUpdate, + FilePath: path, + Content: string(base64Res), + isBase64Encoded: true, + }) + } + return b +} + +// todo content field is required for this operation? 
+func (b *ActionBuilder) Delete(path string, content []byte) *ActionBuilder { + if isKnownTextFormatExtension(path) { + b.actions = append(b.actions, Action{ + Type: ActionTypeDelete, + FilePath: path, + Content: string(content), + }) + } else { + base64Res := make([]byte, base64.StdEncoding.EncodedLen(len(content))) + base64.StdEncoding.Encode(base64Res, content) + b.actions = append(b.actions, Action{ + Type: ActionTypeDelete, + FilePath: path, + Content: string(base64Res), + isBase64Encoded: true, + }) + } + return b +} + +func (b *ActionBuilder) Move(oldPath string, newPath string, content []byte) *ActionBuilder { + if isKnownTextFormatExtension(newPath) { + b.actions = append(b.actions, Action{ + Type: ActionTypeMove, + FilePath: newPath, + PreviousPath: oldPath, + Content: string(content), + }) + } else { + base64Res := make([]byte, base64.StdEncoding.EncodedLen(len(content))) + base64.StdEncoding.Encode(base64Res, content) + b.actions = append(b.actions, Action{ + Type: ActionTypeMove, + FilePath: newPath, + PreviousPath: oldPath, + Content: string(base64Res), + isBase64Encoded: true, + }) + } + return b +} + +func (b *ActionBuilder) MoveAndUpdate(oldPath string, newPath string, content []byte) *ActionBuilder { + if isKnownTextFormatExtension(newPath) { + b.actions = append(b.actions, Action{ + Type: ActionTypeUpdate, + FilePath: newPath, + PreviousPath: oldPath, + Content: string(content), + }) + } else { + base64Res := make([]byte, base64.StdEncoding.EncodedLen(len(content))) + base64.StdEncoding.Encode(base64Res, content) + b.actions = append(b.actions, Action{ + Type: ActionTypeUpdate, + FilePath: newPath, + PreviousPath: oldPath, + Content: string(base64Res), + isBase64Encoded: true, + }) + } + return b +} + +func (b ActionBuilder) Build() []Action { + return b.actions +} + +func isKnownTextFormatExtension(filePath string) bool { + ext := strings.ToLower(strings.TrimPrefix(path.Ext(filePath), ".")) + switch ext { + case "json", "yaml", "yml", "md", 
"txt": + return true + } + return false +} diff --git a/qubership-apihub-service/context/ContextWrappers.go b/qubership-apihub-service/context/ContextWrappers.go new file mode 100644 index 0000000..8df9527 --- /dev/null +++ b/qubership-apihub-service/context/ContextWrappers.go @@ -0,0 +1,56 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package context + +import "context" + +const stacktraceKey = "stacktrace" +const securityKey = "security" + +func CreateContextWithStacktrace(ctx context.Context, functionWithParameters string) context.Context { + var result context.Context + val := ctx.Value(stacktraceKey) + arr, ok := val.([]string) + if !ok { + result = context.WithValue(ctx, stacktraceKey, []string{functionWithParameters}) + } else { + result = context.WithValue(ctx, stacktraceKey, append(arr, functionWithParameters)) + } + return result +} + +func GetStacktraceFromContext(ctx context.Context) []string { + val := ctx.Value(stacktraceKey) + arr, ok := val.([]string) + if !ok { + return nil + } else { + return arr + } +} + +func CreateContextWithSecurity(ctx context.Context, secCtx SecurityContext) context.Context { + return context.WithValue(ctx, securityKey, secCtx) +} + +func GetSecurityContext(ctx context.Context) *SecurityContext { + val := ctx.Value(securityKey) + secCtx, ok := val.(SecurityContext) + if !ok { + return nil + } else { + return &secCtx + } +} diff --git 
a/qubership-apihub-service/context/SecurityContext.go b/qubership-apihub-service/context/SecurityContext.go new file mode 100644 index 0000000..5a6dcc5 --- /dev/null +++ b/qubership-apihub-service/context/SecurityContext.go @@ -0,0 +1,126 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package context + +import ( + "net/http" + "strings" + + "github.com/shaj13/go-guardian/v2/auth" +) + +const SystemRoleExt = "systemRole" +const ApikeyRoleExt = "apikeyRole" +const ApikeyPackageIdExt = "apikeyPackageId" + +type SecurityContext interface { + GetUserId() string + GetUserSystemRole() string + GetApikeyRoles() []string + GetApikeyPackageId() string + GetUserToken() string + GetApiKey() string +} + +func Create(r *http.Request) SecurityContext { + user := auth.User(r) + userId := user.GetID() + systemRole := user.GetExtensions().Get(SystemRoleExt) + apikeyRole := user.GetExtensions().Get(ApikeyRoleExt) + apikeyPackageId := user.GetExtensions().Get(ApikeyPackageIdExt) + token := getAuthorizationToken(r) + if token != "" { + return &securityContextImpl{ + userId: userId, + systemRole: systemRole, + apikeyPackageId: apikeyPackageId, + apikeyRole: apikeyRole, + token: token, + apiKey: "", + } + } else { + return &securityContextImpl{ + userId: userId, + systemRole: systemRole, + apikeyPackageId: apikeyPackageId, + apikeyRole: apikeyRole, + token: "", + apiKey: getApihubApiKey(r), + } + } +} + +func 
CreateSystemContext() SecurityContext { + return &securityContextImpl{userId: "system"} +} + +func CreateFromId(userId string) SecurityContext { + return &securityContextImpl{ + userId: userId, + } +} + +type securityContextImpl struct { + userId string + systemRole string + apikeyRole string + apikeyPackageId string + token string + apiKey string +} + +func (ctx securityContextImpl) GetUserId() string { + return ctx.userId +} + +func (ctx securityContextImpl) GetUserSystemRole() string { + return ctx.systemRole +} + +func (ctx securityContextImpl) GetApikeyRoles() []string { + if ctx.apikeyRole == "" { + return []string{} + } + return SplitApikeyRoles(ctx.apikeyRole) +} + +func (ctx securityContextImpl) GetApikeyPackageId() string { + return ctx.apikeyPackageId +} + +func SplitApikeyRoles(roles string) []string { + return strings.Split(roles, ",") +} + +func MergeApikeyRoles(roles []string) string { + return strings.Join(roles, ",") +} + +func getAuthorizationToken(r *http.Request) string { + authorizationHeaderValue := r.Header.Get("authorization") + return strings.ReplaceAll(authorizationHeaderValue, "Bearer ", "") +} + +func getApihubApiKey(r *http.Request) string { + return r.Header.Get("api-key") +} + +func (ctx securityContextImpl) GetUserToken() string { + return ctx.token +} + +func (ctx securityContextImpl) GetApiKey() string { + return ctx.apiKey +} diff --git a/qubership-apihub-service/controller/ActivityTrackingController.go b/qubership-apihub-service/controller/ActivityTrackingController.go new file mode 100644 index 0000000..4567b7a --- /dev/null +++ b/qubership-apihub-service/controller/ActivityTrackingController.go @@ -0,0 +1,449 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type ActivityTrackingController interface { + GetActivityHistory_deprecated(w http.ResponseWriter, r *http.Request) + GetActivityHistory(w http.ResponseWriter, r *http.Request) + GetActivityHistoryForPackage_deprecated(w http.ResponseWriter, r *http.Request) + GetActivityHistoryForPackage(w http.ResponseWriter, r *http.Request) +} + +func NewActivityTrackingController(activityTrackingService service.ActivityTrackingService, roleService service.RoleService, ptHandler service.PackageTransitionHandler) ActivityTrackingController { + return &activityTrackingControllerImpl{activityTrackingService: activityTrackingService, roleService: roleService, ptHandler: ptHandler} +} + +type activityTrackingControllerImpl struct { + activityTrackingService service.ActivityTrackingService + roleService service.RoleService + ptHandler service.PackageTransitionHandler +} + +func (a activityTrackingControllerImpl) GetActivityHistory_deprecated(w http.ResponseWriter, r *http.Request) { + var err error + onlyFavorite := false + onlyFavoriteStr := r.URL.Query().Get("onlyFavorite") + if onlyFavoriteStr != "" { + 
onlyFavorite, err = strconv.ParseBool(onlyFavoriteStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyFavorite", "type": "bool"}, + Debug: err.Error(), + }) + return + } + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + types, err := getListFromParam(r, "types") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "types"}, + Debug: err.Error(), + }) + return + } + + onlyShared := false + if r.URL.Query().Get("onlyShared") != "" { + onlyShared, err = strconv.ParseBool(r.URL.Query().Get("onlyShared")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyShared", "type": "boolean"}, + Debug: err.Error(), + }) + return + 
} + } + + kind, err := getListFromParam(r, "kind") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + + // TODO: role check? + activityHistoryReq := view.ActivityHistoryReq{ + OnlyFavorite: onlyFavorite, + TextFilter: textFilter, + Types: types, + OnlyShared: onlyShared, + Kind: kind, + Limit: limit, + Page: page, + } + result, err := a.activityTrackingService.GetActivityHistory_deprecated(context.Create(r), activityHistoryReq) + if err != nil { + log.Error("Failed to get activity events for favourite packages: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get activity events", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, result) +} + +func (a activityTrackingControllerImpl) GetActivityHistory(w http.ResponseWriter, r *http.Request) { + var err error + onlyFavorite := false + onlyFavoriteStr := r.URL.Query().Get("onlyFavorite") + if onlyFavoriteStr != "" { + onlyFavorite, err = strconv.ParseBool(onlyFavoriteStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyFavorite", "type": "bool"}, + Debug: err.Error(), + }) + return + } + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + 
Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + types, err := getListFromParam(r, "types") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "types"}, + Debug: err.Error(), + }) + return + } + + onlyShared := false + if r.URL.Query().Get("onlyShared") != "" { + onlyShared, err = strconv.ParseBool(r.URL.Query().Get("onlyShared")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyShared", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + kind, err := getListFromParam(r, "kind") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + + // TODO: role check? 
+ activityHistoryReq := view.ActivityHistoryReq{ + OnlyFavorite: onlyFavorite, + TextFilter: textFilter, + Types: types, + OnlyShared: onlyShared, + Kind: kind, + Limit: limit, + Page: page, + } + result, err := a.activityTrackingService.GetActivityHistory(context.Create(r), activityHistoryReq) + if err != nil { + log.Error("Failed to get activity events for favourite packages: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get activity events", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, result) +} + +func (a activityTrackingControllerImpl) GetActivityHistoryForPackage_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, a.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + includeRefs := false + includeRefsStr := r.URL.Query().Get("includeRefs") + if includeRefsStr != "" { + includeRefs, err = strconv.ParseBool(includeRefsStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeRefs", "type": "bool"}, + Debug: err.Error(), + }) + return + } + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + types, err := getListFromParam(r, "types") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "types"}, + Debug: err.Error(), + }) + return + } + + result, err := a.activityTrackingService.GetEventsForPackage_deprecated(packageId, includeRefs, limit, page, textFilter, types) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, a.ptHandler, packageId, fmt.Sprintf("Failed to get activity events for package %s", packageId), err) + return + } + RespondWithJson(w, http.StatusOK, result) +} + +func (a activityTrackingControllerImpl) GetActivityHistoryForPackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, a.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: 
exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + includeRefs := false + includeRefsStr := r.URL.Query().Get("includeRefs") + if includeRefsStr != "" { + includeRefs, err = strconv.ParseBool(includeRefsStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeRefs", "type": "bool"}, + Debug: err.Error(), + }) + return + } + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + types, err := getListFromParam(r, "types") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "types"}, + Debug: err.Error(), + }) + return + } + + result, err := a.activityTrackingService.GetEventsForPackage(packageId, includeRefs, limit, page, textFilter, types) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, a.ptHandler, packageId, fmt.Sprintf("Failed to get 
activity events for package %s", packageId), err) + return + } + RespondWithJson(w, http.StatusOK, result) +} diff --git a/qubership-apihub-service/controller/AgentController.go b/qubership-apihub-service/controller/AgentController.go new file mode 100644 index 0000000..e24f9ab --- /dev/null +++ b/qubership-apihub-service/controller/AgentController.go @@ -0,0 +1,257 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +type AgentController interface { + ProcessAgentSignal(w http.ResponseWriter, r *http.Request) + ListAgents(w http.ResponseWriter, r *http.Request) + GetAgent(w http.ResponseWriter, r *http.Request) + GetAgentNamespaces(w http.ResponseWriter, r *http.Request) + ListServiceNames(w http.ResponseWriter, r *http.Request) +} + +func NewAgentController(agentRegistrationService 
service.AgentRegistrationService, agentClient client.AgentClient) AgentController { + return &agentControllerImpl{ + agentRegistrationService: agentRegistrationService, + agentClient: agentClient, + } +} + +type agentControllerImpl struct { + agentRegistrationService service.AgentRegistrationService + agentClient client.AgentClient +} + +func (a agentControllerImpl) ProcessAgentSignal(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var message view.AgentKeepaliveMessage + err = json.Unmarshal(body, &message) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(message) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + version, err := a.agentRegistrationService.ProcessAgentSignal(message) + if err != nil { + RespondWithError(w, fmt.Sprintf("Failed to process agent keepalive message %+v", message), err) + return + } + RespondWithJson(w, http.StatusOK, version) +} + +func (a agentControllerImpl) ListAgents(w http.ResponseWriter, r *http.Request) { + onlyActiveStr := r.URL.Query().Get("onlyActive") + var err error + onlyActive := true + if onlyActiveStr != "" { + onlyActive, err = strconv.ParseBool(onlyActiveStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyActive", "type": "boolean"}, + Debug: err.Error(), + }) 
+ return + } + } + + showIncompatibleStr := r.URL.Query().Get("showIncompatible") + showIncompatible := false + if showIncompatibleStr != "" { + showIncompatible, err = strconv.ParseBool(showIncompatibleStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "showIncompatible", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + result, err := a.agentRegistrationService.ListAgents(onlyActive, showIncompatible) + if err != nil { + RespondWithError(w, "Failed to list agents", err) + return + } + + RespondWithJson(w, http.StatusOK, result) +} + +func (a agentControllerImpl) GetAgent(w http.ResponseWriter, r *http.Request) { + agentId := getStringParam(r, "id") + + agent, err := a.agentRegistrationService.GetAgent(agentId) + if err != nil { + RespondWithError(w, "Failed to get agent", err) + return + } + if agent == nil { + w.WriteHeader(http.StatusNotFound) + return + } + + RespondWithJson(w, http.StatusOK, agent) +} + +func (a agentControllerImpl) GetAgentNamespaces(w http.ResponseWriter, r *http.Request) { + agentId := getStringParam(r, "agentId") + + agent, err := a.agentRegistrationService.GetAgent(agentId) + if err != nil { + RespondWithError(w, "Failed to get agent namespaces", err) + return + } + if agent == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.AgentNotFound, + Message: exception.AgentNotFoundMsg, + Params: map[string]interface{}{"agentId": agentId}, + }) + return + } + if agent.Status != view.AgentStatusActive { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.InactiveAgent, + Message: exception.InactiveAgentMsg, + Params: map[string]interface{}{"agentId": agentId}}) + return + } + if agent.AgentVersion == "" { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IncompatibleAgentVersion, + Message: exception.IncompatibleAgentVersionMsg, + Params: map[string]interface{}{"version": agent.AgentVersion}, + }) + return + } + if agent.CompatibilityError != nil && agent.CompatibilityError.Severity == view.SeverityError { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Message: agent.CompatibilityError.Message, + }) + return + } + agentNamespaces, err := a.agentClient.GetNamespaces(context.Create(r), agent.AgentUrl) + if err != nil { + RespondWithError(w, "Failed to get agent namespaces", err) + return + } + RespondWithJson(w, http.StatusOK, agentNamespaces) +} + +func (a agentControllerImpl) ListServiceNames(w http.ResponseWriter, r *http.Request) { + agentId := getStringParam(r, "agentId") + namespace := getStringParam(r, "namespace") + + agent, err := a.agentRegistrationService.GetAgent(agentId) + if err != nil { + RespondWithError(w, "Failed to get agent namespaces", err) + return + } + if agent == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.AgentNotFound, + Message: exception.AgentNotFoundMsg, + Params: map[string]interface{}{"agentId": agentId}, + }) + return + } + if agent.Status != view.AgentStatusActive { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.InactiveAgent, + Message: exception.InactiveAgentMsg, + Params: map[string]interface{}{"agentId": agentId}}) + return + } + if agent.AgentVersion == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IncompatibleAgentVersion, + Message: exception.IncompatibleAgentVersionMsg, + Params: map[string]interface{}{"version": agent.AgentVersion}, + }) + return + } + if agent.CompatibilityError != nil && agent.CompatibilityError.Severity == view.SeverityError { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Message: agent.CompatibilityError.Message, + }) + return + } + + serviceNames, err := a.agentClient.ListServiceNames(context.Create(r), agent.AgentUrl, namespace) + if err != nil { + RespondWithError(w, "Failed to get service names", err) + return + } + RespondWithJson(w, http.StatusOK, serviceNames) +} + +func copyHeader(dst, src http.Header) { + for k, vv := range src { + for _, v := range vv { + dst.Add(k, v) + } + } +} diff --git a/qubership-apihub-service/controller/AgentProxyController.go b/qubership-apihub-service/controller/AgentProxyController.go new file mode 100644 index 0000000..be8a6a3 --- /dev/null +++ b/qubership-apihub-service/controller/AgentProxyController.go @@ -0,0 +1,135 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "crypto/tls" + "io" + "net/http" + "net/url" + "strings" + + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ProxyController interface { + Proxy(w http.ResponseWriter, req *http.Request) +} + +func NewAgentProxyController(agentRegistrationService service.AgentRegistrationService, systemInfoService service.SystemInfoService) ProxyController { + return &agentProxyControllerImpl{ + agentRegistrationService: agentRegistrationService, + tr: http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, + systemInfoService: systemInfoService, + } +} + +type agentProxyControllerImpl struct { + agentRegistrationService service.AgentRegistrationService + tr http.Transport + systemInfoService service.SystemInfoService +} + +func (a *agentProxyControllerImpl) Proxy(w http.ResponseWriter, r *http.Request) { + agentId := getStringParam(r, "agentId") + + agent, err := a.agentRegistrationService.GetAgent(agentId) + if err != nil { + RespondWithError(w, "Failed to proxy a request", err) + return + } + if agent == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.AgentNotFound, + Message: exception.AgentNotFoundMsg, + Params: map[string]interface{}{"agentId": agentId}, + }) + return + } + if agent.Status != view.AgentStatusActive { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.InactiveAgent, + Message: exception.InactiveAgentMsg, + Params: map[string]interface{}{"agentId": agentId}}) + return + } + if agent.AgentVersion == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IncompatibleAgentVersion, + Message: 
exception.IncompatibleAgentVersionMsg, + Params: map[string]interface{}{"version": agent.AgentVersion}, + }) + } + if agent.CompatibilityError != nil && agent.CompatibilityError.Severity == view.SeverityError { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Message: agent.CompatibilityError.Message, + }) + } + agentUrl, err := url.Parse(agent.AgentUrl) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.InvalidAgentUrl, + Message: exception.InvalidAgentUrlMsg, + Params: map[string]interface{}{"url": agent.AgentUrl, "agentId": agentId}, + Debug: err.Error(), + }) + return + } + var validHost bool + for _, host := range a.systemInfoService.GetAllowedHosts() { + if strings.Contains(agentUrl.Host, host) { + validHost = true + break + } + } + if !validHost { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.HostNotAllowed, + Message: exception.HostNotAllowedMsg, + Params: map[string]interface{}{"host": agentUrl.String()}, + }) + return + } + + r.URL.Host = agentUrl.Host + r.URL.Scheme = agentUrl.Scheme + r.Host = agentUrl.Host + log.Debugf("Sending proxy request to %s", r.URL) + resp, err := a.tr.RoundTrip(r) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.ProxyFailed, + Message: exception.ProxyFailedMsg, + Params: map[string]interface{}{"url": r.URL.String()}, + Debug: err.Error(), + }) + return + } + defer resp.Body.Close() + copyHeader(w.Header(), resp.Header) + w.WriteHeader(resp.StatusCode) + io.Copy(w, resp.Body) +} diff --git a/qubership-apihub-service/controller/ApiDocsController.go b/qubership-apihub-service/controller/ApiDocsController.go new file mode 100644 index 0000000..2e374f4 --- /dev/null +++ b/qubership-apihub-service/controller/ApiDocsController.go @@ -0,0 +1,96 @@ +// Copyright 2024-2025 NetCracker 
Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "errors" + "fmt" + "io/ioutil" + "net/http" + "os" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ApiDocsController interface { + GetSpecsUrls(w http.ResponseWriter, r *http.Request) + GetSpec(w http.ResponseWriter, r *http.Request) +} + +func NewApiDocsController(fsRoot string) ApiDocsController { + return apiDocsControllerImpl{ + urls: []view.Url{ + {Url: "/v3/api-docs/APIHUB BE API contract", Name: "APIHUB BE API contract"}, + {Url: "/v3/api-docs/APIHUB registry public API", Name: "APIHUB registry public API"}, + }, + fsRoot: fsRoot + "/api", + } +} + +type apiDocsControllerImpl struct { + urls []view.Url + fsRoot string +} + +func (a apiDocsControllerImpl) GetSpecsUrls(w http.ResponseWriter, r *http.Request) { + configUrl := view.ApiConfig{ + ConfigUrl: "/v3/api-docs/swagger-config", + Urls: a.urls, + } + RespondWithJson(w, http.StatusOK, configUrl) +} + +func (a apiDocsControllerImpl) GetSpec(w http.ResponseWriter, r *http.Request) { + var content []byte + var err error + switch path := r.URL.Path; path { + case a.urls[0].Url: + fullPath := a.fsRoot + "/APIHUB API.yaml" + _, err = os.Stat(fullPath) + if err != nil { + break + } + content, err = ioutil.ReadFile(fullPath) + if err != nil { + break + } + a.respond(w, content) + case a.urls[1].Url: + fullPath := a.fsRoot + "/Public Registry 
API.yaml" + _, err = os.Stat(fullPath) + if err != nil { + break + } + content, err = ioutil.ReadFile(fullPath) + if err != nil { + break + } + a.respond(w, content) + default: + err = errors.New(fmt.Sprintf("There is no API with '%s' title", strings.TrimPrefix(path, "/v3/api-docs/"))) + } + + if err != nil { + RespondWithError(w, "Failed to read API spec", err) + return + } +} + +func (a apiDocsControllerImpl) respond(w http.ResponseWriter, content []byte) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + w.Write(content) +} diff --git a/qubership-apihub-service/controller/ApihubApiKeyController.go b/qubership-apihub-service/controller/ApihubApiKeyController.go new file mode 100644 index 0000000..a7bc037 --- /dev/null +++ b/qubership-apihub-service/controller/ApihubApiKeyController.go @@ -0,0 +1,415 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ApihubApiKeyController interface { + CreateApiKey_deprecated(w http.ResponseWriter, r *http.Request) + CreateApiKey_v3_deprecated(w http.ResponseWriter, r *http.Request) + CreateApiKey(w http.ResponseWriter, r *http.Request) + RevokeApiKey(w http.ResponseWriter, r *http.Request) + GetApiKeys_deprecated(w http.ResponseWriter, r *http.Request) + GetApiKeys_v3_deprecated(w http.ResponseWriter, r *http.Request) + GetApiKeys(w http.ResponseWriter, r *http.Request) + GetApiKeyByKey(w http.ResponseWriter, r *http.Request) + GetApiKeyById(w http.ResponseWriter, r *http.Request) +} + +func NewApihubApiKeyController(apihubApiKeyService service.ApihubApiKeyService, roleService service.RoleService) ApihubApiKeyController { + return &ApihubApiKeyControllerImpl{ + apihubApiKeyService: apihubApiKeyService, + roleService: roleService, + } +} + +type ApihubApiKeyControllerImpl struct { + apihubApiKeyService service.ApihubApiKeyService + roleService service.RoleService +} + +func (a ApihubApiKeyControllerImpl) CreateApiKey_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + + if packageId == "*" { + if !a.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Only system administrator can create api key for all packages", + }) + return + } + } else { + 
sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.AccessTokenManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Access token management permission is required to create api key for the package", + }) + return + } + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var createApiKeyReq view.ApihubApiKeyCreateReq_deprecated + err = json.Unmarshal(body, &createApiKeyReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(createApiKeyReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + apiKey, err := a.apihubApiKeyService.CreateApiKey_deprecated(ctx, packageId, createApiKeyReq.Name, createApiKeyReq.Roles) + if err != nil { + RespondWithError(w, "Failed to create apihub api key", err) + return + } + RespondWithJson(w, http.StatusOK, apiKey) +} + +func (a ApihubApiKeyControllerImpl) CreateApiKey_v3_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + + if packageId == "*" { + if !a.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: 
exception.InsufficientPrivilegesMsg, + Debug: "Only system administrator can create api key for all packages", + }) + return + } + } else { + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.AccessTokenManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Access token management permission is required to create api key for the package", + }) + return + } + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var createApiKeyReq view.ApihubApiKeyCreateReq + err = json.Unmarshal(body, &createApiKeyReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(createApiKeyReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + apiKey, err := a.apihubApiKeyService.CreateApiKey_v3_deprecated(ctx, packageId, createApiKeyReq.Name, createApiKeyReq.Roles) + if err != nil { + RespondWithError(w, "Failed to create apihub api key", err) + return + } + RespondWithJson(w, http.StatusOK, apiKey) +} + +func (a ApihubApiKeyControllerImpl) CreateApiKey(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + + if packageId == "*" { + if !a.roleService.IsSysadm(ctx) { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Only system administrator can create api key for all packages", + }) + return + } + } else { + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.AccessTokenManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Access token management permission is required to create api key for the package", + }) + return + } + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var createApiKeyReq view.ApihubApiKeyCreateReq + err = json.Unmarshal(body, &createApiKeyReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(createApiKeyReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + apiKey, err := a.apihubApiKeyService.CreateApiKey(ctx, packageId, createApiKeyReq.Name, createApiKeyReq.CreatedFor, createApiKeyReq.Roles) + if err != nil { + RespondWithError(w, "Failed to create apihub api key", err) + return + } + RespondWithJson(w, http.StatusOK, apiKey) +} + +func (a ApihubApiKeyControllerImpl) RevokeApiKey(w http.ResponseWriter, r *http.Request) { + 
apiKeyId := getStringParam(r, "id") + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + + if packageId == "*" { + if !a.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Only system administrator can revoke api key for all packages", + }) + return + } + } else { + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.AccessTokenManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "Access token management permission is required to revoke api key for the package", + }) + return + } + } + err := a.apihubApiKeyService.RevokePackageApiKey(ctx, apiKeyId, packageId) + if err != nil { + RespondWithError(w, "Failed to revoke apihub api key", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (a ApihubApiKeyControllerImpl) GetApiKeys_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + apiKeys, err := a.apihubApiKeyService.GetProjectApiKeys_deprecated(packageId) + if err != nil { + RespondWithError(w, "Failed to get all apihub api keys", err) + return + } + RespondWithJson(w, http.StatusOK, 
apiKeys) +} + +func (a ApihubApiKeyControllerImpl) GetApiKeys_v3_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + apiKeys, err := a.apihubApiKeyService.GetProjectApiKeys(packageId) + if err != nil { + RespondWithError(w, "Failed to get all apihub api keys", err) + return + } + RespondWithJson(w, http.StatusOK, apiKeys) +} + +func (a ApihubApiKeyControllerImpl) GetApiKeys(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := a.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + apiKeys, err := a.apihubApiKeyService.GetProjectApiKeys(packageId) + if err != nil { + RespondWithError(w, "Failed to get all apihub api keys", err) + return + } + RespondWithJson(w, http.StatusOK, apiKeys) +} + +func (a ApihubApiKeyControllerImpl) GetApiKeyByKey(w http.ResponseWriter, r *http.Request) { + apiKeyHeader := r.Header.Get("api-key") + if apiKeyHeader == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ApiKeyHeaderIsEmpty, + Message: exception.ApiKeyHeaderIsEmptyMsg, + }) + return + } + apiKey, err := 
a.apihubApiKeyService.GetApiKeyByKey(apiKeyHeader) + if err != nil { + RespondWithError(w, "Failed to get apihub api key", err) + return + } + if apiKey == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ApiKeyNotFoundByKey, + Message: exception.ApiKeyNotFoundByKeyMsg, + }) + return + } + RespondWithJson(w, http.StatusOK, apiKey) +} + +func (a ApihubApiKeyControllerImpl) GetApiKeyById(w http.ResponseWriter, r *http.Request) { + apiKeyId := getStringParam(r, "apiKeyId") + + apiKey, err := a.apihubApiKeyService.GetApiKeyById(apiKeyId) + if err != nil { + RespondWithError(w, "Failed to get apihub api key by id", err) + return + } + if apiKey == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ApiKeyNotFoundById, + Message: exception.ApiKeyNotFoundByIdMsg, + Params: map[string]interface{}{"apiKeyId": apiKeyId}, + }) + return + } + RespondWithJson(w, http.StatusOK, apiKey) +} diff --git a/qubership-apihub-service/controller/BrachWSController.go b/qubership-apihub-service/controller/BrachWSController.go new file mode 100644 index 0000000..bef4df4 --- /dev/null +++ b/qubership-apihub-service/controller/BrachWSController.go @@ -0,0 +1,160 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/google/uuid" + ws "github.com/gorilla/websocket" + log "github.com/sirupsen/logrus" +) + +type BranchWSController interface { + ConnectToProjectBranch(w http.ResponseWriter, r *http.Request) + DebugSessionsLoadBalance(w http.ResponseWriter, r *http.Request) + TestLogWebsocketClient(w http.ResponseWriter, r *http.Request) + TestGetWebsocketClientMessages(w http.ResponseWriter, r *http.Request) +} + +func NewBranchWSController(branchService service.BranchService, wsLoadBalancer service.WsLoadBalancer, internalWebsocketService service.InternalWebsocketService) BranchWSController { + return &branchWSControllerImpl{ + branchService: branchService, + wsLoadBalancer: wsLoadBalancer, + internalWebsocketService: internalWebsocketService, + } +} + +type branchWSControllerImpl struct { + branchService service.BranchService + wsLoadBalancer service.WsLoadBalancer + internalWebsocketService service.InternalWebsocketService +} + +func (c branchWSControllerImpl) ConnectToProjectBranch(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + srv, err := c.wsLoadBalancer.SelectWsServer(projectId, branchName, "") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnableToSelectWsServer, + Message: 
exception.UnableToSelectWsServerMsg, + Debug: err.Error(), + }) + return + } + + var upgrader = ws.Upgrader{ + //skip origin check + CheckOrigin: func(r *http.Request) bool { return true }, + } + websocket, err := upgrader.Upgrade(w, r, nil) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ConnectionNotUpgraded, + Message: exception.ConnectionNotUpgradedMsg, + Debug: err.Error(), + }) + return + } + wsId := uuid.New().String() + if srv != service.LocalServer { + token := "?token=" + strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + c.wsLoadBalancer.RedirectWs("ws://"+srv+":8080/ws/v1/projects/"+projectId+"/branches/"+getStringParam(r, "branchName")+token, websocket, r.Header.Get("Sec-Websocket-Key")) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ConnectToProjectBranch()")) + + err = c.branchService.ConnectToWebsocket(goCtx, projectId, branchName, wsId, websocket) + if err != nil { + log.Error("Failed to connect to websocket: ", err.Error()) + //don't send error response, it doesn't work on upgraded connection + return + } + //DO NOT ADD w.Write... since it's not suitable for websocket! 
+} + +func (c branchWSControllerImpl) DebugSessionsLoadBalance(w http.ResponseWriter, r *http.Request) { + sessions, err := c.wsLoadBalancer.ListSessions() + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list websocket loadbalancer sessions", + Debug: err.Error(), + }) + return + } + + nodes, err := c.wsLoadBalancer.ListNodes() + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list websocket loadbalancer nodes", + Debug: err.Error(), + }) + return + } + + forwardedSessions := c.wsLoadBalancer.ListForwardedSessions() + + bindAddr := c.wsLoadBalancer.GetBindAddr() + + RespondWithJson(w, http.StatusOK, debugResp{BindAddr: bindAddr, Sessions: sessions, Nodes: nodes, ForwardedSessions: forwardedSessions}) +} + +type debugResp struct { + BindAddr string + Sessions []service.WSLoadBalancerSession + Nodes []string + ForwardedSessions []string +} + +func (c branchWSControllerImpl) TestLogWebsocketClient(w http.ResponseWriter, r *http.Request) { + projectId := r.URL.Query().Get("projectId") + branchName := url.PathEscape(r.URL.Query().Get("branchName")) + token := r.URL.Query().Get("token") + + c.internalWebsocketService.LogIncomingBranchMessages(r.Host, projectId, branchName, token) + w.WriteHeader(http.StatusOK) +} + +func (c branchWSControllerImpl) TestGetWebsocketClientMessages(w http.ResponseWriter, r *http.Request) { + projectId := r.URL.Query().Get("projectId") + branchName := url.PathEscape(r.URL.Query().Get("branchName")) + + messages := c.internalWebsocketService.GetBranchSessionLogs(projectId, branchName) + RespondWithJson(w, http.StatusOK, messages) +} diff --git a/qubership-apihub-service/controller/BranchController.go b/qubership-apihub-service/controller/BranchController.go new file mode 100644 index 0000000..a111745 --- /dev/null +++ b/qubership-apihub-service/controller/BranchController.go 
@@ -0,0 +1,633 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type BranchController interface { + GetProjectBranches(w http.ResponseWriter, r *http.Request) + GetProjectBranchDetails(w http.ResponseWriter, r *http.Request) + GetProjectBranchConfigRaw(w http.ResponseWriter, r *http.Request) + CommitBranchDraftChanges(w http.ResponseWriter, r *http.Request) + GetProjectBranchContentZip(w http.ResponseWriter, r *http.Request) + GetProjectBranchFiles(w http.ResponseWriter, r *http.Request) + GetProjectBranchCommitHistory_deprecated(w http.ResponseWriter, r *http.Request) + CloneBranch(w http.ResponseWriter, r *http.Request) + DeleteBranch(w http.ResponseWriter, r *http.Request) + DeleteBranchDraft(w http.ResponseWriter, r *http.Request) + GetBranchConflicts(w http.ResponseWriter, r *http.Request) + AddBranchEditor(w http.ResponseWriter, r *http.Request) + RemoveBranchEditor(w http.ResponseWriter, r *http.Request) +} + +func 
NewBranchController(branchService service.BranchService, + commitService service.CommitService, + projectFilesService service.GitRepoFilesService, + searchService service.SearchService, + publishedService service.PublishedService, + branchEditorsService service.BranchEditorsService, + wsBranchService service.WsBranchService) BranchController { + return &branchControllerImpl{ + branchService: branchService, + commitService: commitService, + projectFilesService: projectFilesService, + searchService: searchService, + publishedService: publishedService, + branchEditorsService: branchEditorsService, + wsBranchService: wsBranchService, + } +} + +type branchControllerImpl struct { + branchService service.BranchService + commitService service.CommitService + projectFilesService service.GitRepoFilesService + searchService service.SearchService + publishedService service.PublishedService + branchEditorsService service.BranchEditorsService + wsBranchService service.WsBranchService +} + +func (b branchControllerImpl) GetProjectBranches(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + filter := r.URL.Query().Get("filter") + branches, err := b.searchService.GetProjectBranches(context.Create(r), projectId, filter) + if err != nil { + log.Error("Failed to get project branches: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get project branches", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, branches) +} + +func (b branchControllerImpl) GetProjectBranchDetails(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: 
exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetProjectBranchDetails()")) + + branch, err := b.branchService.GetBranchDetailsEP(goCtx, projectId, branchName, true) + if err != nil { + log.Error("Failed to get branch details: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get branch details", + Debug: err.Error()}) + } + return + } + branch.RemoveFolders() + RespondWithJson(w, http.StatusOK, branch) +} + +func (b branchControllerImpl) GetProjectBranchConfigRaw(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + originalStr := r.URL.Query().Get("original") + original := false + if originalStr != "" { + original, err = strconv.ParseBool(originalStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "original", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetProjectBranchConfigRaw()")) + + var configRaw []byte 
+ if original { + configRaw, err = b.branchService.GetBranchRawConfigFromGit(goCtx, projectId, branchName) + } else { + configRaw, err = b.branchService.GetBranchRawConfigFromDraft(goCtx, projectId, branchName) + } + if err != nil { + log.Error("Failed to get branch config: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get branch config", + Debug: err.Error()}) + } + return + } + w.Header().Set("Content-Type", "text/plain") + w.WriteHeader(http.StatusOK) + w.Write(configRaw) +} + +func (b branchControllerImpl) CommitBranchDraftChanges(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + defer r.Body.Close() + params, err := getParamsFromBody(r) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + commitMessage, err := getBodyStringParam(params, "comment") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "comment"}, + Debug: err.Error(), + }) + return + } + if commitMessage == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: 
map[string]interface{}{"param": "comment"}, + }) + return + } + newBranchName, err := getBodyStringParam(params, "branch") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "branch"}, + Debug: err.Error(), + }) + return + } + createMergeRequestParam, err := getBodyBoolParam(params, "createMergeRequest") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "createMergeRequest", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + createMergeRequest := false + if createMergeRequestParam != nil { + createMergeRequest = *createMergeRequestParam + } + + err = b.commitService.CommitBranchDraftChanges(context.Create(r), projectId, branchName, newBranchName, commitMessage, createMergeRequest) + if err != nil { + log.Error("Failed to commit branch draft: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + b.wsBranchService.DisconnectClients(projectId, branchName) + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to commit branch draft", + Debug: err.Error()}) + } + return + } + + w.WriteHeader(http.StatusOK) +} + +func (b branchControllerImpl) GetProjectBranchContentZip(w http.ResponseWriter, r *http.Request) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotImplemented, + Message: "Not implemented"}) +} + +func (b branchControllerImpl) GetProjectBranchFiles(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + path, err := url.QueryUnescape(r.URL.Query().Get("path")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "path"}, + Debug: err.Error(), + }) + return + } + + onlyAddable := false + onlyAddableStr := r.URL.Query().Get("onlyAddable") + if onlyAddableStr != "" { + onlyAddable, err = strconv.ParseBool(onlyAddableStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "onlyAddable"}, + Debug: err.Error(), + }) + return + } + } + + page := 1 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + result, err := b.projectFilesService.ListFiles(context.Create(r), projectId, branchName, path, view.PagingParams{Page: page, ItemsPerPage: limit}, onlyAddable) + if err != nil { + log.Error("Failed to list files: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to 
list files", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, view.ListFilesView{Files: result}) +} + +func (b branchControllerImpl) GetProjectBranchCommitHistory_deprecated(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error()}) + return + } + } + changes, err := b.searchService.GetBranchHistory_deprecated(context.Create(r), projectId, branchName, limit, page) + if err != nil { + log.Error("Failed to get project branch commit history: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get project branch commit history", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, changes) +} + +func (b branchControllerImpl) CloneBranch(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + defer r.Body.Close() + params, err := getParamsFromBody(r) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + newBranchName, err := getBodyStringParam(params, "branch") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "branch"}, + Debug: err.Error(), + }) + return + } + if newBranchName == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "branch"}, + }) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("CloneBranch()")) + + err = b.branchService.CloneBranch(goCtx, projectId, branchName, newBranchName) + if err != nil { + log.Error("Failed to clone branch: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to clone branch", + Debug: err.Error()}) + } + return + } + //todo maybe put it in service + RespondWithJson(w, http.StatusOK, map[string]string{"cloned_branch": newBranchName}) +} + +func (b branchControllerImpl) DeleteBranch(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != 
nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("DeleteBranch()")) + + err = b.branchService.DeleteBranch(goCtx, projectId, branchName) + if err != nil { + log.Error("Failed to delete branch: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to delete branch", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (b branchControllerImpl) DeleteBranchDraft(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("DeleteBranchDraft()")) + + err = b.branchService.ResetBranchDraft(goCtx, projectId, branchName, true) + if err != nil { + log.Error("Failed to delete branch draft: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + b.wsBranchService.DisconnectClients(projectId, branchName) + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to delete branch 
draft", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} + +func (b branchControllerImpl) GetBranchConflicts(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r)) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetBranchConflicts()")) + + branchConflicts, err := b.branchService.CalculateBranchConflicts(goCtx, projectId, branchName) + if err != nil { + log.Error("Failed to get branch conflicts: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + b.wsBranchService.DisconnectClients(projectId, branchName) //todo maybe not needed here + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get branch conflicts", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, branchConflicts) +} + +func (b branchControllerImpl) AddBranchEditor(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + err = b.branchEditorsService.AddBranchEditor(projectId, branchName, context.Create(r).GetUserId()) + if err != nil { + log.Error("Failed to add editor: ", err.Error()) + if 
customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add editor", + Debug: err.Error()}) + } + return + } + + w.WriteHeader(http.StatusOK) +} + +func (b branchControllerImpl) RemoveBranchEditor(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + err = b.branchEditorsService.RemoveBranchEditor(projectId, branchName, context.Create(r).GetUserId()) + if err != nil { + log.Error("Failed to remove editor: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to remove editor", + Debug: err.Error()}) + } + return + } + + w.WriteHeader(http.StatusOK) +} diff --git a/qubership-apihub-service/controller/BuildCleanupController.go b/qubership-apihub-service/controller/BuildCleanupController.go new file mode 100644 index 0000000..ca98724 --- /dev/null +++ b/qubership-apihub-service/controller/BuildCleanupController.go @@ -0,0 +1,87 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/gorilla/mux" +) + +type BuildCleanupController interface { + StartMigrationBuildCleanup(w http.ResponseWriter, r *http.Request) + GetMigrationBuildCleanupResult(w http.ResponseWriter, r *http.Request) +} + +func NewBuildCleanupController(buildCleanupService service.DBCleanupService, isSysadm func(context.SecurityContext) bool) BuildCleanupController { + return &buildCleanupControllerImpl{ + buildCleanupService: buildCleanupService, + isSysadm: isSysadm, + } +} + +type buildCleanupControllerImpl struct { + buildCleanupService service.DBCleanupService + isSysadm func(context.SecurityContext) bool +} + +func (b buildCleanupControllerImpl) StartMigrationBuildCleanup(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := b.isSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + id, err := b.buildCleanupService.StartMigrationBuildDataCleanup() + if err != nil { + RespondWithError(w, "Failed to cleanup migration builds", err) + } + + result := map[string]interface{}{} + result["id"] = id + + RespondWithJson(w, http.StatusOK, 
result) +} + +func (b buildCleanupControllerImpl) GetMigrationBuildCleanupResult(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := b.isSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + params := mux.Vars(r) + id := params["id"] + + result, err := b.buildCleanupService.GetMigrationBuildDataCleanupResult(id) + if err != nil { + RespondWithError(w, "Failed to get remove migration build data", err) + } + + RespondWithJson(w, http.StatusOK, result) +} diff --git a/qubership-apihub-service/controller/BusinessMetricController.go b/qubership-apihub-service/controller/BusinessMetricController.go new file mode 100644 index 0000000..f2e44d0 --- /dev/null +++ b/qubership-apihub-service/controller/BusinessMetricController.go @@ -0,0 +1,102 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "fmt" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type BusinessMetricController interface { + GetBusinessMetrics(w http.ResponseWriter, r *http.Request) +} + +func NewBusinessMetricController(businessMetricService service.BusinessMetricService, excelService service.ExcelService, isSysadm func(context.SecurityContext) bool) BusinessMetricController { + return businessMetricControllerImpl{ + businessMetricService: businessMetricService, + isSysadm: isSysadm, + excelService: excelService, + } +} + +type businessMetricControllerImpl struct { + businessMetricService service.BusinessMetricService + excelService service.ExcelService + isSysadm func(context.SecurityContext) bool +} + +func (b businessMetricControllerImpl) GetBusinessMetrics(w http.ResponseWriter, r *http.Request) { + var err error + ctx := context.Create(r) + sufficientPrivileges := b.isSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + parentPackageId := r.URL.Query().Get("parentPackageId") + hierarchyLevel := 0 + if r.URL.Query().Get("hierarchyLevel") != "" { + hierarchyLevel, err = strconv.Atoi(r.URL.Query().Get("hierarchyLevel")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "hierarchyLevel", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + format := 
r.URL.Query().Get("format") + if format == "" { + format = view.ExportFormatJson + } + businessMetrics, err := b.businessMetricService.GetBusinessMetrics(parentPackageId, hierarchyLevel) + if err != nil { + RespondWithError(w, "Failed to get business metrics", err) + return + } + switch format { + case view.ExportFormatJson: + RespondWithJson(w, http.StatusOK, businessMetrics) + return + case view.ExportFormatXlsx: + report, filename, err := b.excelService.ExportBusinessMetrics(businessMetrics) + if err != nil { + RespondWithError(w, "Failed to export business metrics as xlsx", err) + return + } + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%v"`, filename)) + w.Header().Set("Content-Transfer-Encoding", "binary") + w.Header().Set("Expires", "0") + report.Write(w) + report.Close() + return + } +} diff --git a/qubership-apihub-service/controller/CleanupController.go b/qubership-apihub-service/controller/CleanupController.go new file mode 100644 index 0000000..394cf12 --- /dev/null +++ b/qubership-apihub-service/controller/CleanupController.go @@ -0,0 +1,65 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + log "github.com/sirupsen/logrus" +) + +type CleanupController interface { + ClearTestData(w http.ResponseWriter, r *http.Request) +} + +func NewCleanupController(cleanupService service.CleanupService) CleanupController { + return &cleanupControllerImpl{ + cleanupService: cleanupService, + } +} + +type cleanupControllerImpl struct { + cleanupService service.CleanupService +} + +func (c cleanupControllerImpl) ClearTestData(w http.ResponseWriter, r *http.Request) { + testId, err := getUnescapedStringParam(r, "testId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "testId"}, + Debug: err.Error(), + }) + return + } + err = c.cleanupService.ClearTestData(testId) + if err != nil { + log.Error("Failed to clear test data: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to clear test data", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/qubership-apihub-service/controller/ComparisonController.go b/qubership-apihub-service/controller/ComparisonController.go new file mode 100644 index 0000000..c86cf91 --- /dev/null +++ b/qubership-apihub-service/controller/ComparisonController.go @@ -0,0 +1,365 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ComparisonController interface { + CompareTwoVersions(w http.ResponseWriter, r *http.Request) + GetComparisonChangesSummary(w http.ResponseWriter, r *http.Request) +} + +func NewComparisonController(operationService service.OperationService, + versionService service.VersionService, + buildService service.BuildService, + roleService service.RoleService, + comparisonService service.ComparisonService, + monitoringService service.MonitoringService, + ptHandler service.PackageTransitionHandler) ComparisonController { + return &comparisonControllerImpl{ + operationService: operationService, + versionService: versionService, + buildService: buildService, + roleService: roleService, + comparisonService: comparisonService, + monitoringService: monitoringService, + ptHandler: ptHandler, + } +} + +type comparisonControllerImpl struct { + operationService service.OperationService + versionService service.VersionService + buildService service.BuildService 
+ roleService service.RoleService + comparisonService service.ComparisonService + monitoringService service.MonitoringService + ptHandler service.PackageTransitionHandler +} + +func (c comparisonControllerImpl) CompareTwoVersions(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + builderId, err := url.QueryUnescape(r.URL.Query().Get("builderId")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "builderId"}, + Debug: err.Error(), + }) + return + } + clientBuild := false + if r.URL.Query().Get("clientBuild") != "" { + clientBuild, err = strconv.ParseBool(r.URL.Query().Get("clientBuild")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "clientBuild", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + if clientBuild && builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + reCalculate := false + if r.URL.Query().Get("reCalculate") != "" { + reCalculate, err = strconv.ParseBool(r.URL.Query().Get("reCalculate")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "reCalculate", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + + var compareVersionsReq view.CompareVersionsReq + err = json.Unmarshal(body, &compareVersionsReq) + if 
err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + if err := utils.ValidateObject(compareVersionsReq); err != nil { + RespondWithError(w, "", exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidCompareVersionReq, + Message: exception.InvalidCompareVersionReqMsg, + Params: map[string]interface{}{"compareVersionReq": compareVersionsReq, "error": err.Error()}, + }) + } + + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, compareVersionsReq.PackageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + revision, err := c.versionService.GetLatestRevision(compareVersionsReq.PackageId, compareVersionsReq.Version) + if err != nil { + RespondWithError(w, "Failed to get version", err) + return + } + prevVersionRevision, err := c.versionService.GetLatestRevision(compareVersionsReq.PreviousVersionPackageId, compareVersionsReq.PreviousVersion) + if err != nil { + RespondWithError(w, "Failed to get previous version", err) + return + } + + buildConfig := view.BuildConfig{ + PackageId: compareVersionsReq.PackageId, + Version: compareVersionsReq.Version, + PreviousVersionPackageId: compareVersionsReq.PreviousVersionPackageId, + PreviousVersion: compareVersionsReq.PreviousVersion, + BuildType: view.ChangelogType, + CreatedBy: ctx.GetUserId(), + + ComparisonRevision: revision, + ComparisonPrevRevision: prevVersionRevision, + } + + if reCalculate { + buildId, err := c.buildService.CreateChangelogBuild(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to 
create changelog type build", err) + return + } + if clientBuild { + RespondWithJson(w, http.StatusCreated, view.ChangelogBuildConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + PreviousVersionPackageId: buildConfig.PreviousVersionPackageId, + PreviousVersion: buildConfig.PreviousVersion, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + calculationProcessStatus := view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + } + + compareResult, err := c.comparisonService.ValidComparisonResultExists(compareVersionsReq.PackageId, compareVersionsReq.Version, compareVersionsReq.PreviousVersionPackageId, compareVersionsReq.PreviousVersion) + if err != nil { + RespondWithError(w, "Failed to get versions comparison result", err) + return + } + if compareResult { + w.WriteHeader(http.StatusOK) + return + } + + searchRequest := view.ChangelogBuildSearchRequest{ + PackageId: compareVersionsReq.PackageId, + Version: compareVersionsReq.Version, + PreviousVersionPackageId: compareVersionsReq.PreviousVersionPackageId, + PreviousVersion: compareVersionsReq.PreviousVersion, + BuildType: view.ChangelogType, + + ComparisonRevision: revision, + ComparisonPrevRevision: prevVersionRevision, + } + var calculationProcessStatus view.CalculationProcessStatus + buildView, err := c.buildService.GetBuildViewByChangelogSearchQuery(searchRequest) + if err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Status == http.StatusNotFound { + buildId, err := c.buildService.CreateChangelogBuild(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create changelog type build", err) + return + } + if clientBuild { + RespondWithJson(w, http.StatusCreated, view.ChangelogBuildConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, 
+ PreviousVersionPackageId: buildConfig.PreviousVersionPackageId, + PreviousVersion: buildConfig.PreviousVersion, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + } + } + RespondWithError(w, "Failed to get buildStatus", err) + return + } + switch buildView.Status { + case string(view.StatusError): + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusError), + Message: buildView.Details, + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + case string(view.StatusComplete): + //this case is possible only if we have an old finished build for which we don't have a comparison (rebuild required) + //or if this build completed during this method execution (rebuild is not required) + compareResult, err := c.comparisonService.ValidComparisonResultExists(compareVersionsReq.PackageId, compareVersionsReq.Version, compareVersionsReq.PreviousVersionPackageId, compareVersionsReq.PreviousVersion) + if err != nil { + RespondWithError(w, "Failed to get versions comparison result", err) + return + } + if compareResult { + w.WriteHeader(http.StatusOK) + return + } + buildId, err := c.buildService.CreateChangelogBuild(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create changelog type build", err) + return + } + if clientBuild { + RespondWithJson(w, http.StatusCreated, view.ChangelogBuildConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + PreviousVersionPackageId: buildConfig.PreviousVersionPackageId, + PreviousVersion: buildConfig.PreviousVersion, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + calculationProcessStatus = 
view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + default: + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + } +} + +func (c comparisonControllerImpl) GetComparisonChangesSummary(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, c.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + previousVersion, err := url.QueryUnescape(r.URL.Query().Get("previousVersion")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "previousVersion"}, + Debug: err.Error(), + }) + return + } + previousVersionPackageId, err := url.QueryUnescape(r.URL.Query().Get("previousVersionPackageId")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: 
map[string]interface{}{"param": "previousVersionPackageId"}, + Debug: err.Error(), + }) + return + } + + c.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ComparisonsCalled, packageId) + if previousVersionPackageId != "" { + c.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ComparisonsCalled, previousVersionPackageId) + } + + comparisonSummary, err := c.comparisonService.GetComparisonResult(packageId, version, previousVersionPackageId, previousVersion) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, c.ptHandler, packageId, "Failed to get comparison changes summary", err) + return + } + RespondWithJson(w, http.StatusOK, comparisonSummary) +} diff --git a/qubership-apihub-service/controller/ContentController.go b/qubership-apihub-service/controller/ContentController.go new file mode 100644 index 0000000..24c5732 --- /dev/null +++ b/qubership-apihub-service/controller/ContentController.go @@ -0,0 +1,1039 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + log "github.com/sirupsen/logrus" +) + +type ContentController interface { + GetContent(w http.ResponseWriter, r *http.Request) + GetContentAsFile(w http.ResponseWriter, r *http.Request) + UpdateContent(w http.ResponseWriter, r *http.Request) + UploadContent(w http.ResponseWriter, r *http.Request) + GetContentHistory(w http.ResponseWriter, r *http.Request) + GetContentFromCommit(w http.ResponseWriter, r *http.Request) + GetContentFromBlobId(w http.ResponseWriter, r *http.Request) + MoveFile(w http.ResponseWriter, r *http.Request) + DeleteFile(w http.ResponseWriter, r *http.Request) + AddFile(w http.ResponseWriter, r *http.Request) + UpdateMetadata(w http.ResponseWriter, r *http.Request) + ResetFile(w http.ResponseWriter, r *http.Request) + RestoreFile(w http.ResponseWriter, r *http.Request) + GetAllContent(w http.ResponseWriter, r *http.Request) +} + +func NewContentController(contentService service.DraftContentService, + branchService service.BranchService, + searchService service.SearchService, + wsFileEditService service.WsFileEditService, + wsBranchService service.WsBranchService, + systemInfoService service.SystemInfoService) ContentController { + return &contentControllerImpl{ + contentService: contentService, + branchService: branchService, + searchService: searchService, + wsFileEditService: wsFileEditService, + wsBranchService: wsBranchService, + systemInfoService: systemInfoService, + } +} + +type contentControllerImpl struct { + contentService service.DraftContentService + branchService service.BranchService + 
searchService service.SearchService + wsFileEditService service.WsFileEditService + wsBranchService service.WsBranchService + systemInfoService service.SystemInfoService +} + +func (c contentControllerImpl) GetContent(w http.ResponseWriter, r *http.Request) { + contentId, err := getUnescapedStringParam(r, "fileId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileId"}, + Debug: err.Error(), + }) + return + } + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + content, err := c.contentService.GetContentFromDraftOrGit(context.Create(r), projectId, branchName, contentId) + if err != nil { + log.Error("Failed to get content: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get content", + Debug: err.Error()}) + } + return + } + w.Header().Set("Content-Type", content.DataType) + w.WriteHeader(http.StatusOK) + w.Write(content.Data) +} + +func (c contentControllerImpl) GetContentAsFile(w http.ResponseWriter, r *http.Request) { + contentId, err := getUnescapedStringParam(r, "fileId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileId"}, + Debug: err.Error(), + }) + return + } + projectId := 
getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}

	goCtx := context.CreateContextWithSecurity(r.Context(), context.Create(r))
	// BUG FIX: fmt.Sprintf was called with a constant format string and no
	// arguments (flagged by `go vet`); the plain string literal is equivalent.
	goCtx = context.CreateContextWithStacktrace(goCtx, "GetContentAsFile()")

	// File metadata (name) comes from the branch config.
	content, err := c.branchService.GetContentNoData(goCtx, projectId, branchName, contentId)
	if err != nil {
		log.Error("Failed to get content as file: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to get content as file",
				Debug:   err.Error()})
		}
		return
	}
	// The file body itself comes from the draft (if edited there) or from git.
	contentData, err := c.contentService.GetContentFromDraftOrGit(context.Create(r), projectId, branchName, contentId)
	if err != nil {
		log.Error("Failed to get content as file: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to get content as file",
				Debug:   err.Error()})
		}
		return
	}
	data := contentData.Data
	fileName := content.Name

	// NOTE(review): fileName is interpolated unquoted into the header; names
	// containing spaces, quotes or non-ASCII characters may yield an invalid
	// Content-Disposition value — consider quoting/escaping. Left unchanged
	// to avoid altering the wire format without confirmation.
	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", fileName))
	w.WriteHeader(http.StatusOK)
	w.Write(data)
}

// UpdateContent replaces the draft content of a single file with the raw
// request body and pushes the new content to connected websocket editors.
func (c contentControllerImpl) UpdateContent(w http.ResponseWriter, r *http.Request) {
	contentId, err := getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}
	defer r.Body.Close()
	data, err := ioutil.ReadAll(r.Body)
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.BadRequestBody,
			Message: exception.BadRequestBodyMsg,
			Debug:   err.Error(),
		})
		return
	}
	//todo validate body here if it is not done on frontend
	err = c.contentService.UpdateDraftContentData(context.Create(r), projectId, branchName, contentId, data)
	if err != nil {
		log.Error("Failed to update content: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
			// A non-business failure may leave websocket clients out of sync.
			c.wsBranchService.DisconnectClients(projectId, branchName)
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to update content",
				Debug:   err.Error()})
		}
		return
	}

	c.wsFileEditService.SetFileContent(projectId, branchName, contentId, data)

	w.WriteHeader(http.StatusOK)
}

// UploadContent accepts a multipart form with one or more files and stores
// them in the branch draft; the optional "publish" form value (default true)
// marks them for publication.
func (c contentControllerImpl) UploadContent(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + + if r.ContentLength > c.systemInfoService.GetBranchContentSizeLimitMB() { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BranchContentSizeExceeded, + Message: exception.BranchContentSizeExceededMsg, + Params: map[string]interface{}{"size": c.systemInfoService.GetBranchContentSizeLimitMB()}, + }) + return + } + + err = r.ParseMultipartForm(0) + if err != nil { + if strings.Contains(err.Error(), "http: request body too large") { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BranchContentSizeExceeded, + Message: exception.BranchContentSizeExceededMsg, + Params: map[string]interface{}{"size": c.systemInfoService.GetBranchContentSizeLimitMB()}, + }) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + } + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + publishStr := r.FormValue("publish") + publish := true + if publishStr != "" { + publish, err = strconv.ParseBool(publishStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "publish", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + ctx := context.Create(r) + var contentToSave []view.Content + var contentDataToSave []view.ContentData + path := r.FormValue("path") + for _, files := range r.MultipartForm.File { + for _, f := range files { + file, err := f.Open() + if err != nil { + log.Error("Failed to upload content:", err.Error()) + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to upload content", + Debug: err.Error()}) + return + } + data, err := ioutil.ReadAll(file) + closeErr := file.Close() + if closeErr != nil { + log.Debugf("failed to close temporal file: %+v", err) + } + if err != nil { + log.Error("Failed to upload content:", err.Error()) + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to upload content", + Debug: err.Error()}) + return + } + contentToSave = append(contentToSave, view.Content{Name: f.Filename, Path: path, Publish: publish}) + contentDataToSave = append(contentDataToSave, view.ContentData{Data: data}) + } + } + if len(contentToSave) == 0 { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NoFilesSent, + Message: exception.NoFilesSentMsg, + }) + return + } + + resultFileIds, err := c.contentService.CreateDraftContentWithData(ctx, projectId, branchName, contentToSave, contentDataToSave) + if err != nil { + log.Error("Failed to upload content: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to upload content", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, view.ContentAddResponse{FileIds: resultFileIds}) +} + +func (c contentControllerImpl) GetContentHistory(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + fileId, err := 
getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}
	limit, customError := getLimitQueryParam(r)
	if customError != nil {
		RespondWithCustomError(w, customError)
		return
	}
	page := 0
	if r.URL.Query().Get("page") != "" {
		page, err = strconv.Atoi(r.URL.Query().Get("page"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "page", "type": "int"},
				Debug:   err.Error()})
			return
		}
	}
	fileHistory, err := c.searchService.GetContentHistory(context.Create(r), projectId, branchName, fileId, limit, page)
	if err != nil {
		log.Error("Failed to get content changes: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to get content changes",
				Debug:   err.Error()})
		}
		return
	}
	RespondWithJson(w, http.StatusOK, fileHistory)
}

// MoveFile renames/moves a draft file: the current fileId is replaced with
// the "newFileId" value supplied in the JSON request body.
func (c contentControllerImpl) MoveFile(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}
	fileId, err := getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}
	defer r.Body.Close()
	body, err := getParamsFromBody(r)
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.BadRequestBody,
			Message: exception.BadRequestBodyMsg,
			Debug:   err.Error(),
		})
		return
	}

	newFileId, err := getBodyStringParam(body, "newFileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameter,
			Message: exception.InvalidParameterMsg,
			Params:  map[string]interface{}{"param": "newFileId"},
			Debug:   err.Error(),
		})
		return
	}
	if newFileId == "" {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.EmptyParameter,
			Message: exception.EmptyParameterMsg,
			Params:  map[string]interface{}{"param": "newFileId"},
		})
		return
	}
	if err = c.contentService.ChangeFileId(context.Create(r), projectId, branchName, fileId, newFileId); err != nil {
		log.Error("Failed to change fileId: ", err.Error())
		customError, ok := err.(*exception.CustomError)
		if !ok {
			c.wsBranchService.DisconnectClients(projectId, branchName)
			customError = &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to change fileId",
				Debug:   err.Error(),
			}
		}
		RespondWithCustomError(w, customError)
		return
	}
	w.WriteHeader(http.StatusOK)
}

// DeleteFile removes a file from the branch draft. With ?delete=true the file
// is deleted from the git repository as well; otherwise it is only excluded
// from the configuration.
func (c contentControllerImpl) DeleteFile(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}
	fileId, err := getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}
	rawDelete := r.URL.Query().Get("delete")
	deleteFromGit := false
	if rawDelete != "" {
		if deleteFromGit, err = strconv.ParseBool(rawDelete); err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "delete", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	// Pick the removal strategy up front, then run it once.
	remove := c.contentService.ExcludeFile
	if deleteFromGit {
		remove = c.contentService.DeleteFile
	}
	if err = remove(context.Create(r), projectId, branchName, fileId); err != nil {
		log.Error("Failed to delete file: ", err.Error())
		customError, ok := err.(*exception.CustomError)
		if !ok {
			c.wsBranchService.DisconnectClients(projectId, branchName)
			customError = &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to delete file",
				Debug:   err.Error(),
			}
		}
		RespondWithCustomError(w, customError)
		return
	}
	w.WriteHeader(http.StatusOK)
}

// AddFile attaches files to the branch draft from one of three sources:
// existing git paths, an external URL, or a brand-new empty file.
func (c contentControllerImpl) AddFile(w http.ResponseWriter, r *http.Request) {
	var err error
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
} + defer r.Body.Close() + params, err := getParamsFromBody(r) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + source, err := getBodyStringParam(params, "source") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "source"}, + Debug: err.Error(), + }) + return + } + if source == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "source"}, + }) + return + } + publishParam, err := getBodyBoolParam(params, "publish") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "publish"}, + Debug: err.Error(), + }) + return + } + publish := true + if publishParam != nil { + publish = *publishParam + } + dataObj, err := getBodyObjectParam(params, "data") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "data"}, + Debug: err.Error(), + }) + return + } + var resultFileIds []string + + switch source { + case "git": + { + paths, parseErr := getBodyStrArrayParam(dataObj, "paths") + if len(paths) == 0 { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "paths"}, + }) + return + } + if parseErr != nil { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "paths"}, + Debug: parseErr.Error(), + }) + return + } + resultFileIds, err = c.contentService.AddGitFiles(context.Create(r), projectId, branchName, paths, publish) + } + case "url": + { + url, parseErr := getBodyStringParam(dataObj, "url") + if parseErr != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "url"}, + Debug: parseErr.Error(), + }) + return + } + if url == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "url"}, + }) + return + } + path, parseErr := getBodyStringParam(dataObj, "path") + if parseErr != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "path"}, + Debug: parseErr.Error(), + }) + return + } + resultFileIds, err = c.contentService.AddFileFromUrl(context.Create(r), projectId, branchName, url, path, publish) + } + case "new": + { + fileName, parseErr := getBodyStringParam(dataObj, "name") + if parseErr != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "name"}, + Debug: parseErr.Error(), + }) + return + } + if fileName == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: 
map[string]interface{}{"param": "name"},
				})
				return
			}
			filePath, parseErr := getBodyStringParam(dataObj, "path")
			if parseErr != nil {
				RespondWithCustomError(w, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidParameter,
					Message: exception.InvalidParameterMsg,
					Params:  map[string]interface{}{"param": "path"},
					Debug:   parseErr.Error(),
				})
				return
			}
			fileTypeStr, parseErr := getBodyStringParam(dataObj, "type")
			if parseErr != nil {
				RespondWithCustomError(w, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidParameter,
					Message: exception.InvalidParameterMsg,
					Params:  map[string]interface{}{"param": "type"},
					Debug:   parseErr.Error(),
				})
				return
			}
			fileType := view.ParseTypeFromString(fileTypeStr)
			/*if fileType == view.Unknown {
				RespondWithCustomError(w, &exception.CustomError{
					Type:    http.StatusBadRequest,
					Code:    exception.InvalidParameter,
					Message: exception.InvalidParameterMsg, //todo maybe custom error for incorrect enum
					Params:  map[string]interface{}{"param": "type"},
					Debug:   "File type unknown",
				})
				return
			}*/
			resultFileIds, err = c.contentService.AddEmptyFile(context.Create(r), projectId, branchName, fileName, fileType, filePath, publish)
		}
	default:
		{
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.UnsupportedSourceType,
				Message: exception.UnsupportedSourceTypeMsg,
				Params:  map[string]interface{}{"type": source},
			})
			return
		}
	}

	if err != nil {
		log.Error("Failed to add new file: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
			c.wsBranchService.DisconnectClients(projectId, branchName)
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to add new file",
				Debug:   err.Error()})
		}
		return
	}

	RespondWithJson(w, http.StatusOK, view.ContentAddResponse{FileIds: resultFileIds})
}

// GetContentFromCommit streams a file exactly as it was stored at the given
// git commit; the payload is always served as text/plain.
func (c contentControllerImpl) GetContentFromCommit(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}
	fileId, err := getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}
	commitId := getStringParam(r, "commitId")

	payload, err := c.searchService.GetContentFromCommit(context.Create(r), projectId, branchName, fileId, commitId)
	if err != nil {
		log.Error("Failed to get content: ", err.Error())
		customError, ok := err.(*exception.CustomError)
		if !ok {
			customError = &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to get content",
				Debug:   err.Error(),
			}
		}
		RespondWithCustomError(w, customError)
		return
	}
	w.Header().Set("Content-Type", "text/plain") // For frontend it's convenient to get all types as plain text
	w.WriteHeader(http.StatusOK)
	w.Write(payload)
}

// GetContentFromBlobId streams a file by its raw git blob id as text/plain.
func (c contentControllerImpl) GetContentFromBlobId(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	blobId := getStringParam(r, "blobId")

	data, err := c.searchService.GetContentFromBlobId(context.Create(r), projectId, blobId)
	if err != nil {
		log.Error("Failed to get content: ", err.Error())
		if customError, ok := err.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
		} else {
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get content", + Debug: err.Error()}) + } + return + } + w.Header().Set("Content-Type", "text/plain") // For frontend it's convenient to get all types as plain text + w.WriteHeader(http.StatusOK) + w.Write(data) +} + +func (c contentControllerImpl) UpdateMetadata(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + //fileId from request can be either path to folder or path to file (depends on 'bulk' query parameter) + path, err := getUnescapedStringParam(r, "fileId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileId"}, + Debug: err.Error(), + }) + return + } + + bulkStr := r.URL.Query().Get("bulk") + bulk := false + if bulkStr != "" { + bulk, err = strconv.ParseBool(bulkStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "bulk", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var metaPatch view.ContentMetaPatch + err = 
json.Unmarshal(body, &metaPatch) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + err = c.contentService.UpdateMetadata(context.Create(r), projectId, branchName, path, metaPatch, bulk) + if err != nil { + log.Error("Failed to update content metadata: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + c.wsBranchService.DisconnectClients(projectId, branchName) + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to update content metadata", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} + +func (c contentControllerImpl) ResetFile(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + fileId, err := getUnescapedStringParam(r, "fileId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileId"}, + Debug: err.Error(), + }) + return + } + + err = c.contentService.ResetFile(context.Create(r), projectId, branchName, fileId) + if err != nil { + log.Error("Failed to reset file to last commit: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + c.wsBranchService.DisconnectClients(projectId, branchName) + RespondWithCustomError(w, 
&exception.CustomError{
					Status:  http.StatusInternalServerError,
					Message: "Failed to reset file to last commit",
					Debug:   err.Error()})
		}
		return
	}

	w.WriteHeader(http.StatusOK)
}

// RestoreFile brings a previously deleted/excluded file back into the draft.
func (c contentControllerImpl) RestoreFile(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}
	fileId, err := getUnescapedStringParam(r, "fileId")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "fileId"},
			Debug:   err.Error(),
		})
		return
	}

	if err = c.contentService.RestoreFile(context.Create(r), projectId, branchName, fileId); err != nil {
		log.Error("Failed to restore file: ", err.Error())
		customError, ok := err.(*exception.CustomError)
		if !ok {
			c.wsBranchService.DisconnectClients(projectId, branchName)
			customError = &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to restore file",
				Debug:   err.Error(),
			}
		}
		RespondWithCustomError(w, customError)
		return
	}

	w.WriteHeader(http.StatusOK)
}

// GetAllContent returns the entire branch content (draft overlaid on git)
// packed into a single zip archive.
func (c contentControllerImpl) GetAllContent(w http.ResponseWriter, r *http.Request) {
	projectId := getStringParam(r, "projectId")
	branchName, err := getUnescapedStringParam(r, "branchName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "branchName"},
			Debug:   err.Error(),
		})
		return
	}

	archive, err := c.contentService.GetAllZippedContentFromDraftOrGit(context.Create(r), projectId, branchName)
	if err != nil {
		log.Error("Failed to get content: ", err.Error())
		customError, ok := err.(*exception.CustomError)
		if !ok {
			customError = &exception.CustomError{
				Status:  http.StatusInternalServerError,
				Message: "Failed to get content",
				Debug:   err.Error(),
			}
		}
		RespondWithCustomError(w, customError)
		return
	}

	w.Header().Set("Content-Type", "application/zip")
	w.Write(archive)
}
diff --git a/qubership-apihub-service/controller/ControllerUtils.go b/qubership-apihub-service/controller/ControllerUtils.go
new file mode 100644
index 0000000..9bd0e03
--- /dev/null
+++ b/qubership-apihub-service/controller/ControllerUtils.go
@@ -0,0 +1,206 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package controller + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" +) + +func getStringParam(r *http.Request, p string) string { + params := mux.Vars(r) + return params[p] +} + +func getUnescapedStringParam(r *http.Request, p string) (string, error) { + params := mux.Vars(r) + return url.QueryUnescape(params[p]) +} + +func getParamsFromBody(r *http.Request) (map[string]interface{}, error) { + var params map[string]interface{} + body, err := ioutil.ReadAll(r.Body) + if err != nil { + return nil, err + } + if err := json.Unmarshal(body, ¶ms); err != nil { + return nil, err + } + return params, nil +} + +func getBodyObjectParam(params map[string]interface{}, p string) (map[string]interface{}, error) { + if params[p] == nil { + return nil, fmt.Errorf("parameter %v is missing", p) + } + if param, ok := params[p].(map[string]interface{}); ok { + return param, nil + } + return nil, fmt.Errorf("parameter %v has incorrect type", p) +} + +func getBodyStringParam(params map[string]interface{}, p string) (string, error) { + if params[p] == nil { + return "", nil + } + if param, ok := params[p].(string); ok { + return param, nil + } + return "", fmt.Errorf("parameter %v is not a string", p) +} + +func getBodyBoolParam(params map[string]interface{}, p string) (*bool, error) { + if params[p] == nil { + return nil, nil + } + if param, ok := params[p].(bool); ok { + return ¶m, nil + } + return nil, fmt.Errorf("parameter %v is not boolean", p) +} + +func getBodyStrArrayParam(params map[string]interface{}, p string) ([]string, error) { + if params[p] == nil { + return nil, fmt.Errorf("parameter %v is missing", p) + } + if param, ok := params[p].([]interface{}); ok { + arr := make([]string, 0) + 
for _, el := range param { + if elStr, ok := el.(string); ok { + arr = append(arr, elStr) + } + } + return arr, nil + } + return nil, fmt.Errorf("parameter %v has incorrect type", p) +} + +func RespondWithError(w http.ResponseWriter, msg string, err error) { + log.Errorf("%s: %s", msg, err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: msg, + Debug: err.Error()}) + } +} + +func RespondWithCustomError(w http.ResponseWriter, err *exception.CustomError) { + log.Debugf("Request failed. Code = %d. Message = %s. Params: %v. Debug: %s", err.Status, err.Message, err.Params, err.Debug) + RespondWithJson(w, err.Status, err) +} +func RespondWithJson(w http.ResponseWriter, code int, payload interface{}) { + response, _ := json.Marshal(payload) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(code) + w.Write(response) +} + +func IsAcceptableAlias(alias string) bool { + return alias == url.QueryEscape(alias) && !strings.Contains(alias, ".") +} + +func getListFromParam(r *http.Request, param string) ([]string, error) { + paramStr := r.URL.Query().Get(param) + if paramStr == "" { + return []string{}, nil + } + listStr, err := url.QueryUnescape(paramStr) + if err != nil { + return nil, err + } + return strings.Split(listStr, ","), nil +} + +func getLimitQueryParam(r *http.Request) (int, *exception.CustomError) { + return getLimitQueryParamBase(r, 100, 100) +} + +func getLimitQueryParamWithIncreasedMax(r *http.Request) (int, *exception.CustomError) { + return getLimitQueryParamBase(r, 100, 500) +} + +func getLimitQueryParamBase(r *http.Request, defaultLimit, maxLimit int) (int, *exception.CustomError) { + if r.URL.Query().Get("limit") != "" { + limit, err := strconv.Atoi(r.URL.Query().Get("limit")) + if err != nil { + return 0, &exception.CustomError{ + Status: http.StatusBadRequest, + 
Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "limit", "type": "int"}, + Debug: err.Error(), + } + } + if limit < 1 || limit > maxLimit { + return 0, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidLimitMsg, + Params: map[string]interface{}{"value": limit, "maxLimit": maxLimit}, + } + } + return limit, nil + } + return defaultLimit, nil +} + +// TODO: duplicate in v2 +func handlePkgRedirectOrRespondWithError(w http.ResponseWriter, r *http.Request, ptHandler service.PackageTransitionHandler, packageId, msg string, err error) { + if customError, ok := err.(*exception.CustomError); ok { + if strings.Contains(r.URL.Path, packageId) && + (customError.Code == exception.PackageNotFound || + customError.Code == exception.PublishedPackageVersionNotFound || + customError.Code == exception.PublishedVersionNotFound) { + newPkg, err := ptHandler.HandleMissingPackageId(packageId) + if err != nil { + RespondWithError(w, "Package not found, failed to check package move", err) + return + } + if newPkg != "" { + path := strings.Replace(r.URL.Path, packageId, newPkg, -1) + if r.URL.RawQuery != "" { + path += "?" 
+ r.URL.RawQuery + } + w.Header().Add("Location", path) + w.WriteHeader(301) + return + } + } + } + RespondWithError(w, msg, err) +} + +func getTemplatePath(r *http.Request) string { + route := mux.CurrentRoute(r) + if route == nil { + return "" + } + templatePath, _ := route.GetPathTemplate() + return templatePath +} diff --git a/qubership-apihub-service/controller/ExportController.go b/qubership-apihub-service/controller/ExportController.go new file mode 100644 index 0000000..1527396 --- /dev/null +++ b/qubership-apihub-service/controller/ExportController.go @@ -0,0 +1,1085 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ExportController interface { + GenerateVersionDoc(w http.ResponseWriter, r *http.Request) + GenerateFileDoc(w http.ResponseWriter, r *http.Request) + GenerateApiChangesExcelReportV3(w http.ResponseWriter, r *http.Request) + GenerateApiChangesExcelReport(w http.ResponseWriter, r *http.Request) //deprecated + GenerateOperationsExcelReport(w http.ResponseWriter, r *http.Request) + GenerateDeprecatedOperationsExcelReport(w http.ResponseWriter, r *http.Request) + ExportOperationGroupAsOpenAPIDocuments_deprecated(w http.ResponseWriter, r *http.Request) + ExportOperationGroupAsOpenAPIDocuments(w http.ResponseWriter, r *http.Request) +} + +func NewExportController(publishedService service.PublishedService, + portalService service.PortalService, + searchService service.SearchService, + roleService service.RoleService, + excelService service.ExcelService, + versionService service.VersionService, + monitoringService service.MonitoringService) ExportController { + return &exportControllerImpl{ + publishedService: publishedService, + portalService: portalService, + searchService: searchService, + roleService: roleService, + excelService: excelService, + versionService: versionService, + monitoringService: monitoringService, + } +} + +type exportControllerImpl struct { + publishedService service.PublishedService + portalService service.PortalService + searchService service.SearchService + roleService service.RoleService + excelService 
service.ExcelService + versionService service.VersionService + monitoringService service.MonitoringService +} + +func (e exportControllerImpl) ExportOperationGroupAsOpenAPIDocuments_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: 
map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + format, err := url.QueryUnescape(r.URL.Query().Get("format")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "format"}, + Debug: err.Error(), + }) + return + } + + e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId) + + content, err := e.versionService.GetTransformedDocuments_deprecated(packageId, version, apiType, groupName, format) + if err != nil { + log.Errorf("Failed to export api changes error - %s", err.Error()) + RespondWithError(w, "Failed to export group name openAPI", err) + return + } + if content == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.TransformedDocumentsNotFound, + Message: exception.TransformedDocumentsNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "version": version, "apiType": apiType, "groupName": groupName}, + }) + return + } + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s_%s_%s.zip", groupName, packageId, version)) + w.Header().Set("Content-Transfer-Encoding", "binary") + w.Header().Set("Expires", "0") + w.WriteHeader(http.StatusOK) + w.Write(content) +} + +func (e exportControllerImpl) ExportOperationGroupAsOpenAPIDocuments(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: 
exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + buildType, err := getUnescapedStringParam(r, "buildType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "buildType"}, + Debug: err.Error(), + }) + return + } + format := r.URL.Query().Get("format") + + err = view.ValidateFormatForBuildType(buildType, format) + if err != nil { + RespondWithError(w, "buildType format validation failed", err) + return + } + 
e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId) + + content, err := e.versionService.GetTransformedDocuments(packageId, version, apiType, groupName, buildType, format) + if err != nil { + RespondWithError(w, "Failed to export operations group", err) + return + } + if content == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.TransformedDocumentsNotFound, + Message: exception.TransformedDocumentsNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "version": version, "apiType": apiType, "groupName": groupName}, + }) + return + } + switch buildType { + case view.ReducedSourceSpecificationsType: + w.Header().Set("Content-Type", "application/zip") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s_%s_%s.zip", groupName, packageId, version)) + w.Header().Set("Content-Transfer-Encoding", "binary") + case view.MergedSpecificationType: + switch format { + // html format for mergedSpecification not supported yet + // case string(view.HtmlDocumentFormat): + // w.Header().Set("Content-Type", "application/zip") + // w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s_%s_%s.zip", groupName, packageId, version)) + // w.Header().Set("Content-Transfer-Encoding", "binary") + case string(view.JsonDocumentFormat): + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s_%s_%s.json", groupName, packageId, version)) + case string(view.YamlDocumentFormat): + w.Header().Set("Content-Type", "application/yaml") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s_%s_%s.yaml", groupName, packageId, version)) + } + } + + w.Header().Set("Expires", "0") + w.WriteHeader(http.StatusOK) + w.Write(content) + +} + +func (e exportControllerImpl) GenerateVersionDoc(w http.ResponseWriter, r *http.Request) { + packageId := 
getStringParam(r, "packageId")
	ctx := context.Create(r)

	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		respondNoReadPermission(w)
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		respondInvalidEscape(w, "version", err)
		return
	}

	docType := view.GetDtFromStr(r.URL.Query().Get("docType"))

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	var data []byte
	var filename string
	switch docType {
	case view.DTInteractive:
		data, filename, err = e.portalService.GenerateInteractivePageForPublishedVersion(packageId, versionName)
		if err != nil {
			RespondWithError(w, fmt.Sprintf("Failed to generate interactive HTML page for version %s:%s", packageId, versionName), err)
			return
		}
		w.Header().Set("Content-Type", "application/octet-stream")
		w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", filename))
	case view.DTRaw:
		// Raw export only makes sense for a single file, not a whole version.
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Message: "Document type " + string(docType) + " is not applicable for version"})
		return
	case view.DTPdf, view.DTStatic:
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotImplemented,
			Message: "Document type " + string(docType) + " is not supported yet"})
		return
	default:
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Message: "Document type " + string(docType) + " is invalid"})
		return
	}

	w.WriteHeader(http.StatusOK)
	w.Write(data)
}

// GenerateFileDoc exports a single published document (identified by slug) of
// a package version: either as a generated interactive HTML page or as the
// raw stored file content.
func (e exportControllerImpl) GenerateFileDoc(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)

	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		respondNoReadPermission(w)
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		respondInvalidEscape(w, "version", err)
		return
	}
	slug := getStringParam(r, "slug")

	docType := view.GetDtFromStr(r.URL.Query().Get("docType"))

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	var data []byte
	switch docType {
	case view.DTInteractive:
		var filename string
		data, filename, err = e.portalService.GenerateInteractivePageForPublishedFile(packageId, versionName, slug)
		if err != nil {
			RespondWithError(w, fmt.Sprintf("Failed to generate interactive HTML page for file %s:%s:%s", packageId, versionName, slug), err)
			return
		}
		w.Header().Set("Content-Type", "application/octet-stream")
		w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", filename))
	case view.DTRaw:
		content, cd, err := e.publishedService.GetLatestContentDataBySlug(packageId, versionName, slug)
		if err != nil {
			RespondWithError(w, "Failed to get published content as file", err)
			return
		}
		data = cd.Data
		w.Header().Set("Content-Type", "application/octet-stream")
		w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", content.Name))
	case view.DTPdf, view.DTStatic:
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotImplemented,
			Message: "Document type " + string(docType) + " is not supported yet"})
		return
	default:
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Message: "Document type " + string(docType) + " is invalid"})
		return
	}

	w.WriteHeader(http.StatusOK)
	w.Write(data)
}

// respondInvalidEscape writes the standard 400 response for a path/query
// parameter whose URL escaping could not be decoded.
func respondInvalidEscape(w http.ResponseWriter, param string, err error) {
	RespondWithCustomError(w, &exception.CustomError{
		Status:  http.StatusBadRequest,
		Code:    exception.InvalidURLEscape,
		Message: exception.InvalidURLEscapeMsg,
		Params:  map[string]interface{}{"param": param},
		Debug:   err.Error(),
	})
}

// respondNoReadPermission writes the standard 403 response for a caller
// lacking read permission on the package.
func respondNoReadPermission(w http.ResponseWriter) {
	RespondWithCustomError(w, &exception.CustomError{
		Status:  http.StatusForbidden,
		Code:    exception.InsufficientPrivileges,
		Message: exception.InsufficientPrivilegesMsg,
	})
}

// GenerateApiChangesExcelReport exports the API changes of a version against
// its previous version as an Excel report.
//
// Deprecated: superseded by GenerateApiChangesExcelReportV3.
func (e exportControllerImpl) GenerateApiChangesExcelReport(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)

	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		respondNoReadPermission(w)
		return
	}
	version, err := getUnescapedStringParam(r, "version")
	if err != nil {
		respondInvalidEscape(w, "version", err)
		return
	}

	format, err := url.QueryUnescape(r.URL.Query().Get("format"))
	if err != nil {
		respondInvalidEscape(w, "format", err)
		return
	}
	if format == "" {
		format = view.ExportFormatXlsx
	} else if !view.ValidateApiChangesExportFormat(format) {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.UnsupportedFormat,
			Message: exception.UnsupportedFormatMsg,
			Params:  map[string]interface{}{"format": format},
		})
		return
	}
	previousVersion, err := url.QueryUnescape(r.URL.Query().Get("previousVersion"))
	if err != nil {
		respondInvalidEscape(w, "previousVersion", err)
		return
	}
	previousVersionPackageId, err := url.QueryUnescape(r.URL.Query().Get("previousVersionPackageId"))
	if err != nil {
		respondInvalidEscape(w, "previousVersionPackageId", err)
		return
	}

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	exportApiChangesRequestView := view.ExportApiChangesRequestView{
		PreviousVersionPackageId: previousVersionPackageId,
		PreviousVersion:          previousVersion,
	}
	apiChangesReport, versionName, err := e.excelService.ExportApiChanges(packageId, version, "", []string{}, exportApiChangesRequestView)
	if err != nil {
		log.Errorf("Failed to export api changes error - %s", err.Error())
		RespondWithError(w, "Failed to export api changes", err)
		return
	}
	if apiChangesReport == nil {
		log.Info("ApiChangeReport is empty")
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ChangesAreEmpty,
			Message: exception.ChangesAreEmptyMsg,
		})
		return
	}

	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=APIChanges_%s_%s.xlsx", packageId, versionName))
	w.Header().Set("Content-Transfer-Encoding", "binary")
	w.Header().Set("Expires", "0")
	apiChangesReport.Write(w)
}

// GenerateApiChangesExcelReportV3 exports the API changes between two package
// versions as an Excel report, with filtering by tag, group, severity etc.
func (e exportControllerImpl) GenerateApiChangesExcelReportV3(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	version, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiType"},
			Debug:   err.Error(),
		})
		return
	}
	previousVersion, err := url.QueryUnescape(r.URL.Query().Get("previousVersion"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "previousVersion"},
			Debug:   err.Error(),
		})
		return
	}
	previousVersionPackageId, err := url.QueryUnescape(r.URL.Query().Get("previousVersionPackageId"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
+ Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "previousVersionPackageId"}, + Debug: err.Error(), + }) + return + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + apiKind := r.URL.Query().Get("apiKind") + apiAudience := r.URL.Query().Get("apiAudience") + if apiAudience == "all" { + apiAudience = "" + } + if apiAudience != "" && !view.ValidApiAudience(apiAudience) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiAudience", "value": apiAudience}, + }) + return + } + refPackageId := r.URL.Query().Get("refPackageId") + + emptyTag := false + emptyTagStr := r.URL.Query().Get("emptyTag") + if emptyTagStr != "" { + emptyTag, err = strconv.ParseBool(emptyTagStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "emptyTag", "type": "bool"}, + Debug: err.Error(), + }) + return + } + } + tags := make([]string, 0) + if !emptyTag { + tags, err = getListFromParam(r, "tag") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "tag"}, + Debug: err.Error(), + }) + return + } + } + + emptyGroup := false + if r.URL.Query().Get("emptyGroup") != "" { + emptyGroup, err = 
strconv.ParseBool(r.URL.Query().Get("emptyGroup"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyGroup", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	group := r.URL.Query().Get("group")
	// "emptyGroup" and "group" are mutually exclusive filters.
	if emptyGroup && group != "" {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OverlappingQueryParameter,
			Message: exception.OverlappingQueryParameterMsg,
			Params:  map[string]interface{}{"param1": "emptyGroup", "param2": "group"},
		})
		return
	}

	severities, err := getListFromParam(r, "severity")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "severity"},
			Debug:   err.Error(),
		})
		return
	}
	for _, severity := range severities {
		if !view.ValidSeverity(severity) {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.InvalidParameterValue,
				Message: exception.InvalidParameterValueMsg,
				Params:  map[string]interface{}{"param": "severity", "value": severity},
			})
			return
		}
	}

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	exportApiChangesRequestView := view.ExportApiChangesRequestView{
		PreviousVersionPackageId: previousVersionPackageId,
		PreviousVersion:          previousVersion,
		ApiKind:                  apiKind,
		Tags:                     tags,
		RefPackageId:             refPackageId,
		TextFilter:               textFilter,
		EmptyTag:                 emptyTag,
		Group:                    group,
		EmptyGroup:               emptyGroup,
		ApiAudience:              apiAudience,
	}
	apiChangesReport, versionName, err := e.excelService.ExportApiChanges(packageId, version, apiType, severities, exportApiChangesRequestView)
	if err != nil {
		log.Errorf("Failed to export api changes error - %s", err.Error())
		RespondWithError(w, "Failed to export api changes", err)
		return
	}
	if apiChangesReport == nil {
		log.Info("ApiChangeReport is empty")
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ChangesAreEmpty,
			Message: exception.ChangesAreEmptyMsg,
		})
		return
	}
	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=APIChanges_%s_%s.xlsx", packageId, versionName))
	w.Header().Set("Content-Transfer-Encoding", "binary")
	w.Header().Set("Expires", "0")
	// BUG FIX: the report write was previously unchecked; headers are already
	// sent at this point, so all we can still do on failure is log it.
	if err := apiChangesReport.Write(w); err != nil {
		log.Errorf("Failed to write api changes report to response: %s", err.Error())
	}
}

// respondInvalidURLEscape writes the standard 400 response for a parameter
// whose value could not be URL-unescaped.
func respondInvalidURLEscape(w http.ResponseWriter, param string, err error) {
	RespondWithCustomError(w, &exception.CustomError{
		Status:  http.StatusBadRequest,
		Code:    exception.InvalidURLEscape,
		Message: exception.InvalidURLEscapeMsg,
		Params:  map[string]interface{}{"param": param},
		Debug:   err.Error(),
	})
}

// unescapedQueryParam reads and URL-unescapes an optional query parameter.
// ok == false means a 400 response has already been written to w.
func unescapedQueryParam(w http.ResponseWriter, r *http.Request, param string) (string, bool) {
	value, err := url.QueryUnescape(r.URL.Query().Get(param))
	if err != nil {
		respondInvalidURLEscape(w, param, err)
		return "", false
	}
	return value, true
}

// parseOptionalBoolQueryParam reads an optional boolean query parameter.
// An absent parameter yields false. ok == false means a 400 response has
// already been written to w.
func parseOptionalBoolQueryParam(w http.ResponseWriter, r *http.Request, param string) (bool, bool) {
	raw := r.URL.Query().Get(param)
	if raw == "" {
		return false, true
	}
	parsed, err := strconv.ParseBool(raw)
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.IncorrectParamType,
			Message: exception.IncorrectParamTypeMsg,
			Params:  map[string]interface{}{"param": param, "type": "boolean"},
			Debug:   err.Error(),
		})
		return false, false
	}
	return parsed, true
}

// resolveGroupFilter reads the mutually exclusive "group"/"emptyGroup"
// query parameters. ok == false means an error response has already been
// written to w.
func resolveGroupFilter(w http.ResponseWriter, r *http.Request) (group string, emptyGroup bool, ok bool) {
	emptyGroup, ok = parseOptionalBoolQueryParam(w, r, "emptyGroup")
	if !ok {
		return "", false, false
	}
	group = r.URL.Query().Get("group")
	if emptyGroup && group != "" {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OverlappingQueryParameter,
			Message: exception.OverlappingQueryParameterMsg,
			Params:  map[string]interface{}{"param1": "emptyGroup", "param2": "group"},
		})
		return "", false, false
	}
	return group, emptyGroup, true
}

// resolveApiAudienceFilter reads the "apiAudience" query parameter,
// normalizing "all" to the empty string (no filtering) and validating any
// other non-empty value. ok == false means a 400 response has already been
// written to w.
func resolveApiAudienceFilter(w http.ResponseWriter, r *http.Request) (string, bool) {
	apiAudience := r.URL.Query().Get("apiAudience")
	if apiAudience == "all" {
		apiAudience = ""
	}
	if apiAudience != "" && !view.ValidApiAudience(apiAudience) {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameterValue,
			Message: exception.InvalidParameterValueMsg,
			Params:  map[string]interface{}{"param": "apiAudience", "value": apiAudience},
		})
		return "", false
	}
	return apiAudience, true
}

// GenerateOperationsExcelReport renders the operations of a package version
// into an xlsx file and streams it as an attachment. Requires read permission
// on the package. Responds 404 when the version has no matching operations.
func (e exportControllerImpl) GenerateOperationsExcelReport(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	version, err := getUnescapedStringParam(r, "version")
	if err != nil {
		respondInvalidURLEscape(w, "version", err)
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		respondInvalidURLEscape(w, "apiType", err)
		return
	}
	textFilter, ok := unescapedQueryParam(w, r, "textFilter")
	if !ok {
		return
	}
	kind, ok := unescapedQueryParam(w, r, "kind")
	if !ok {
		return
	}
	apiAudience, ok := resolveApiAudienceFilter(w, r)
	if !ok {
		return
	}
	tag, ok := unescapedQueryParam(w, r, "tag")
	if !ok {
		return
	}
	emptyTag, ok := parseOptionalBoolQueryParam(w, r, "emptyTag")
	if !ok {
		return
	}
	if emptyTag {
		// A concrete tag filter is meaningless when only untagged operations
		// are requested.
		tag = ""
	}
	refPackageId, ok := unescapedQueryParam(w, r, "refPackageId")
	if !ok {
		return
	}
	group, emptyGroup, ok := resolveGroupFilter(w, r)
	if !ok {
		return
	}

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	exportOperationsRequestView := view.ExportOperationRequestView{
		Tag:          tag,
		TextFilter:   textFilter,
		EmptyTag:     emptyTag,
		Kind:         kind,
		RefPackageId: refPackageId,
		EmptyGroup:   emptyGroup,
		Group:        group,
		ApiAudience:  apiAudience,
	}
	operationsReport, versionName, err := e.excelService.ExportOperations(packageId, version, apiType, exportOperationsRequestView)
	if err != nil {
		log.Errorf("Excel error - %s", err.Error())
		RespondWithError(w, "Failed to export operations", err)
		return
	}
	if operationsReport == nil {
		log.Info("Operations are empty")
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationsAreEmpty,
			Message: exception.OperationsAreEmptyMsg,
		})
		return
	}
	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=APIOperations_%s_%s.xlsx", packageId, versionName))
	w.Header().Set("Content-Transfer-Encoding", "binary")
	w.Header().Set("Expires", "0")
	// BUG FIX: previously the write error was silently discarded.
	if err := operationsReport.Write(w); err != nil {
		log.Errorf("Failed to write operations report to response: %s", err.Error())
	}
}

// GenerateDeprecatedOperationsExcelReport renders the deprecated operations of
// a package version into an xlsx file and streams it as an attachment.
// Requires read permission on the package. Responds 404 when there are no
// matching deprecated operations.
func (e exportControllerImpl) GenerateDeprecatedOperationsExcelReport(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	sufficientPrivileges, err := e.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	version, err := getUnescapedStringParam(r, "version")
	if err != nil {
		respondInvalidURLEscape(w, "version", err)
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		respondInvalidURLEscape(w, "apiType", err)
		return
	}
	textFilter, ok := unescapedQueryParam(w, r, "textFilter")
	if !ok {
		return
	}
	kind, ok := unescapedQueryParam(w, r, "kind")
	if !ok {
		return
	}
	apiAudience, ok := resolveApiAudienceFilter(w, r)
	if !ok {
		return
	}
	refPackageId, ok := unescapedQueryParam(w, r, "refPackageId")
	if !ok {
		return
	}

	tags := make([]string, 0)
	emptyTag, ok := parseOptionalBoolQueryParam(w, r, "emptyTag")
	if !ok {
		return
	}
	if !emptyTag {
		tags, err = getListFromParam(r, "tag")
		if err != nil {
			respondInvalidURLEscape(w, "tag", err)
			return
		}
	}

	group, emptyGroup, ok := resolveGroupFilter(w, r)
	if !ok {
		return
	}

	e.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.ExportsCalled, packageId)

	exportOperationsRequestView := view.ExportOperationRequestView{
		Tags:         tags,
		TextFilter:   textFilter,
		Kind:         kind,
		RefPackageId: refPackageId,
		EmptyTag:     emptyTag,
		EmptyGroup:   emptyGroup,
		Group:        group,
		ApiAudience:  apiAudience,
	}
	deprecatedOperationsReport, versionName, err := e.excelService.ExportDeprecatedOperations(packageId, version, apiType, exportOperationsRequestView)
	if err != nil {
		log.Errorf("Excel error - %s", err.Error())
		RespondWithError(w, "Failed to export operations", err)
		return
	}
	if deprecatedOperationsReport == nil {
		log.Info("Deprecated operations are empty")
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationsAreEmpty,
			Message: exception.OperationsAreEmptyMsg,
		})
		return
	}
	w.Header().Set("Content-Type", "application/octet-stream")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=DeprecatedOperations_%s_%s.xlsx", packageId, versionName))
	w.Header().Set("Content-Transfer-Encoding", "binary")
	w.Header().Set("Expires", "0")
	// BUG FIX: previously the write error was silently discarded.
	if err := deprecatedOperationsReport.Write(w); err != nil {
		log.Errorf("Failed to write deprecated operations report to response: %s", err.Error())
	}
}
diff --git a/qubership-apihub-service/controller/FileWSController.go b/qubership-apihub-service/controller/FileWSController.go
new file mode 100644
index 0000000..4e4cc7d
--- /dev/null
+++ b/qubership-apihub-service/controller/FileWSController.go @@ -0,0 +1,172 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "net/url" + + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + ws "github.com/gorilla/websocket" +) + +type FileWSController interface { + ConnectToFile(w http.ResponseWriter, r *http.Request) + TestLogWebsocketClient(w http.ResponseWriter, r *http.Request) + TestGetWebsocketClientMessages(w http.ResponseWriter, r *http.Request) + TestSendMessageToWebsocket(w http.ResponseWriter, r *http.Request) +} + +func NewFileWSController(wsFileEditService service.WsFileEditService, wsLoadBalancer service.WsLoadBalancer, internalWebsocketService service.InternalWebsocketService) FileWSController { + return &fileWSControllerImpl{ + wsFileEditService: wsFileEditService, + wsLoadBalancer: wsLoadBalancer, + internalWebsocketService: internalWebsocketService, + } +} + +type fileWSControllerImpl struct { + wsFileEditService service.WsFileEditService + wsLoadBalancer service.WsLoadBalancer + internalWebsocketService 
service.InternalWebsocketService +} + +func (c fileWSControllerImpl) ConnectToFile(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + fileId, err := getUnescapedStringParam(r, "fileId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileId"}, + Debug: err.Error(), + }) + return + } + + srv, err := c.wsLoadBalancer.SelectWsServer(projectId, branchName, fileId) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnableToSelectWsServer, + Message: exception.UnableToSelectWsServerMsg, + Debug: err.Error(), + }) + return + } + + var upgrader = ws.Upgrader{ + //skip origin check + CheckOrigin: func(r *http.Request) bool { return true }, + } + websocket, err := upgrader.Upgrade(w, r, nil) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ConnectionNotUpgraded, + Message: exception.ConnectionNotUpgradedMsg, + Debug: err.Error(), + }) + return + } + wsId := uuid.New().String() + if srv != service.LocalServer { + token := "?token=" + strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ") + c.wsLoadBalancer.RedirectWs("ws://"+srv+":8080/ws/v1/projects/"+projectId+ + "/branches/"+getStringParam(r, "branchName")+"/files/"+getStringParam(r, "fileId")+token, websocket, r.Header.Get("Sec-Websocket-Key")) + return + } + + err = c.wsFileEditService.ConnectToFileEditSession(context.Create(r), 
projectId, branchName, fileId, wsId, websocket) + if err != nil { + log.Error("Failed to ConnectToFileEditSession: ", err.Error()) + //don't send error response, it doesn't work on upgraded connection + return + } + //DO NOT ADD w.Write... since it's not suitable for websocket! +} + +func (c fileWSControllerImpl) TestLogWebsocketClient(w http.ResponseWriter, r *http.Request) { + projectId := r.URL.Query().Get("projectId") + branchName := url.PathEscape(r.URL.Query().Get("branchName")) + fileId := url.PathEscape(r.URL.Query().Get("fileId")) + token := r.URL.Query().Get("token") + + c.internalWebsocketService.LogIncomingFileMessages(r.Host, projectId, branchName, fileId, token) + w.WriteHeader(http.StatusOK) +} + +func (c fileWSControllerImpl) TestGetWebsocketClientMessages(w http.ResponseWriter, r *http.Request) { + projectId := r.URL.Query().Get("projectId") + branchName := url.PathEscape(r.URL.Query().Get("branchName")) + fileId := url.PathEscape(r.URL.Query().Get("fileId")) + + messages := c.internalWebsocketService.GetFileSessionLogs(projectId, branchName, fileId) + RespondWithJson(w, http.StatusOK, messages) +} + +func (c fileWSControllerImpl) TestSendMessageToWebsocket(w http.ResponseWriter, r *http.Request) { + projectId := r.URL.Query().Get("projectId") + branchName := url.PathEscape(r.URL.Query().Get("branchName")) + fileId := url.PathEscape(r.URL.Query().Get("fileId")) + token := r.URL.Query().Get("token") + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var message interface{} + err = json.Unmarshal(body, &message) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + 
} + + c.internalWebsocketService.SendMessageToFileWebsocket(r.Host, projectId, branchName, fileId, token, message) + w.WriteHeader(http.StatusOK) +} diff --git a/qubership-apihub-service/controller/GitHookController.go b/qubership-apihub-service/controller/GitHookController.go new file mode 100644 index 0000000..180b8e0 --- /dev/null +++ b/qubership-apihub-service/controller/GitHookController.go @@ -0,0 +1,127 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" + "github.com/xanzy/go-gitlab" +) + +type GitHookController interface { + SetGitLabToken(w http.ResponseWriter, r *http.Request) + HandleEvent(w http.ResponseWriter, r *http.Request) +} + +func NewGitHookController(gitHookService service.GitHookService) GitHookController { + return &gitHookController{ + gitHookService: gitHookService, + } +} + +type gitHookController struct { + gitHookService service.GitHookService +} + +func (c gitHookController) SetGitLabToken(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + ctx := context.Create(r) + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var webhookIntegration view.GitLabWebhookIntegration + err = json.Unmarshal(body, &webhookIntegration) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(webhookIntegration) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = c.gitHookService.SetGitLabToken(ctx, projectId, 
webhookIntegration.SecretToken) + if err != nil { + RespondWithError(w, "SetGitLabToken failed", err) + } else { + w.WriteHeader(http.StatusOK) + } +} + +func (c gitHookController) HandleEvent(w http.ResponseWriter, r *http.Request) { + payload, err := io.ReadAll(r.Body) + if err != nil || len(payload) == 0 { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + log.Debug("Received git hooks event: " + string(payload)) + + eventType := gitlab.HookEventType(r) + event, err := gitlab.ParseWebhook(eventType, payload) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + secretToken := r.Header.Get("X-Gitlab-Token") + if len(secretToken) == 0 { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + }) + } + + result, err := c.gitHookService.HandleGitLabEvent(eventType, event, secretToken) + if err != nil { + RespondWithError(w, "Handle event failed", err) + } else { + RespondWithJson(w, http.StatusOK, result) + } +} diff --git a/qubership-apihub-service/controller/GroupController.go b/qubership-apihub-service/controller/GroupController.go new file mode 100644 index 0000000..526b147 --- /dev/null +++ b/qubership-apihub-service/controller/GroupController.go @@ -0,0 +1,243 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type GroupController interface { + AddGroup(w http.ResponseWriter, r *http.Request) + GetAllGroups(w http.ResponseWriter, r *http.Request) + GetGroupInfo(w http.ResponseWriter, r *http.Request) + FavorGroup(w http.ResponseWriter, r *http.Request) + DisfavorGroup(w http.ResponseWriter, r *http.Request) +} + +func NewGroupController(service service.GroupService, publishedService service.PublishedService, roleService service.RoleService) GroupController { + return &groupControllerImpl{service: service, publishedService: publishedService, roleService: roleService} +} + +type groupControllerImpl struct { + service service.GroupService + publishedService service.PublishedService + roleService service.RoleService +} + +func (g groupControllerImpl) AddGroup(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + 
Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var group view.Group + err = json.Unmarshal(body, &group) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + ctx := context.Create(r) + var sufficientPrivileges bool + if group.ParentId == "" { + sufficientPrivileges = g.roleService.IsSysadm(ctx) + } else { + sufficientPrivileges, err = g.roleService.HasRequiredPermissions(ctx, group.ParentId, view.CreateAndUpdatePackagePermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + validationErr := utils.ValidateObject(group) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + if !IsAcceptableAlias(group.Alias) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasContainsForbiddenChars, + Message: exception.AliasContainsForbiddenCharsMsg, + }) + return + } + + if !strings.Contains(group.ParentId, ".") && strings.ToLower(group.Alias) == "runenv" && !g.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasContainsRunenvChars, + Message: exception.AliasContainsRunenvCharsMsg, + }) + return + } + + newGroup, err := g.service.AddGroup(context.Create(r), &group) + if err != nil { + log.Error("Failed to add group: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add group", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusCreated, newGroup) +} + +func (g groupControllerImpl) GetAllGroups(w http.ResponseWriter, r *http.Request) { + depth := 1 + var err error + if r.URL.Query().Get("depth") != "" { + depth, err = strconv.Atoi(r.URL.Query().Get("depth")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "depth", "type": "int"}, + Debug: err.Error()}) + return + } + } + + onlyFavoriteStr := r.URL.Query().Get("onlyFavorite") + onlyFavorite := false + if onlyFavoriteStr != "" { + onlyFavorite, err = strconv.ParseBool(onlyFavoriteStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyFavorite", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + groupId := r.URL.Query().Get("groupId") + groupName := r.URL.Query().Get("name") + + groups, err := g.service.GetAllGroups(context.Create(r), depth, groupId, groupName, onlyFavorite) + if err != nil { + log.Error("Failed to get all groups: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get all groups", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, groups) +} + +func (g groupControllerImpl) GetGroupInfo(w http.ResponseWriter, r *http.Request) { + groupId := getStringParam(r, "groupId") + + groupInfo, err := g.service.GetGroupInfo(context.Create(r), groupId) + if err != nil { + 
log.Error("Failed to get group info: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get group info", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, groupInfo) +} + +func (g groupControllerImpl) FavorGroup(w http.ResponseWriter, r *http.Request) { + groupId := getStringParam(r, "groupId") + + err := g.service.FavorGroup(context.Create(r), groupId) + if err != nil { + log.Error("Failed to add group to favorites: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add group to favorites", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} + +func (g groupControllerImpl) DisfavorGroup(w http.ResponseWriter, r *http.Request) { + groupId := getStringParam(r, "groupId") + + err := g.service.DisfavorGroup(context.Create(r), groupId) + if err != nil { + log.Error("Failed to remove group from favorites: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to remove group from favorites", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} diff --git a/qubership-apihub-service/controller/HealthController.go b/qubership-apihub-service/controller/HealthController.go new file mode 100644 index 0000000..5cc3d4e --- /dev/null +++ b/qubership-apihub-service/controller/HealthController.go @@ -0,0 +1,57 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may 
not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" +) + +type HealthController interface { + HandleReadyRequest(w http.ResponseWriter, r *http.Request) + HandleLiveRequest(w http.ResponseWriter, r *http.Request) +} + +func NewHealthController(readyChan chan bool) HealthController { + c := healthControllerImpl{ready: false} + utils.SafeAsync(func() { + c.watchReady(readyChan) + }) + return &c +} + +type healthControllerImpl struct { + ready bool +} + +func (h healthControllerImpl) HandleReadyRequest(w http.ResponseWriter, r *http.Request) { + if h.ready { + w.WriteHeader(http.StatusOK) // any code in (>=200 & <400) + return + } else { + w.WriteHeader(http.StatusNotFound) // any code >= 400 + } +} + +func (h healthControllerImpl) HandleLiveRequest(w http.ResponseWriter, r *http.Request) { + // Just return 200 at this moment + // TODO: but maybe need to check some internal status + w.WriteHeader(http.StatusOK) +} + +func (h *healthControllerImpl) watchReady(readyChan chan bool) { + h.ready = <-readyChan +} diff --git a/qubership-apihub-service/controller/IntegrationsController.go b/qubership-apihub-service/controller/IntegrationsController.go new file mode 100644 index 0000000..dae9eba --- /dev/null +++ b/qubership-apihub-service/controller/IntegrationsController.go @@ -0,0 +1,245 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// 
you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/shaj13/go-guardian/v2/auth" + log "github.com/sirupsen/logrus" +) + +type IntegrationsController interface { + GetUserApiKeyStatus(w http.ResponseWriter, r *http.Request) + SetUserApiKey(w http.ResponseWriter, r *http.Request) + ListRepositories(w http.ResponseWriter, r *http.Request) + ListBranchesAndTags(w http.ResponseWriter, r *http.Request) +} + +func NewIntegrationsController(service service.IntegrationsService) IntegrationsController { + return &integrationsControllerImpl{service: service} +} + +type integrationsControllerImpl struct { + service service.IntegrationsService +} + +func (c integrationsControllerImpl) GetUserApiKeyStatus(w http.ResponseWriter, r *http.Request) { + integration, err := view.GitIntegrationTypeFromStr(getStringParam(r, "integrationId")) + if err != nil { + log.Error("Failed to read integration type: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: 
"Failed to get user api key status", + Debug: err.Error()}) + } + return + } + + user := auth.User(r) + userId := user.GetID() + if userId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UserIdNotFound, + Message: exception.UserIdNotFoundMsg, + }) + return + } + + status, err := c.service.GetUserApiKeyStatus(integration, userId) + if err != nil { + log.Error("Failed to get user api key status: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get user api key status", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, status) +} + +func (c integrationsControllerImpl) SetUserApiKey(w http.ResponseWriter, r *http.Request) { + integration, err := view.GitIntegrationTypeFromStr(getStringParam(r, "integrationId")) + if err != nil { + log.Error("Failed to read integration type: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to set user api key", + Debug: err.Error()}) + } + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var request view.ApiKeyRequest + err = json.Unmarshal(body, &request) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + if request.ApiKey == "" { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "apikey"}, + }) + return + } + + user := auth.User(r) + userId := user.GetID() + if userId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UserIdNotFound, + Message: exception.UserIdNotFoundMsg, + }) + return + } + + err = c.service.SetUserApiKey(integration, userId, request.ApiKey) + if err != nil { + log.Error("Failed to set api key: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to set api key", + Debug: err.Error()}) + } + return + } + //todo err is needed? err is always nil + status, err := c.service.GetUserApiKeyStatus(integration, userId) + if err != nil { + log.Error("Failed to get user api key status: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to set api key", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, status) +} + +func (c integrationsControllerImpl) ListRepositories(w http.ResponseWriter, r *http.Request) { + integration, err := view.GitIntegrationTypeFromStr(getStringParam(r, "integrationId")) + if err != nil { + log.Error("Failed to read integration type: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list repositories", + Debug: err.Error()}) + } + return + } + + filter := r.URL.Query().Get("filter") + + repos, 
groups, err := c.service.ListRepositories(context.Create(r), integration, filter) + if err != nil { + log.Error("Failed to list repositories: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list repositories", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, view.RepositoriesList{Repositories: repos, Groups: groups}) +} + +func (c integrationsControllerImpl) ListBranchesAndTags(w http.ResponseWriter, r *http.Request) { + filter := r.URL.Query().Get("filter") + integration, err := view.GitIntegrationTypeFromStr(getStringParam(r, "integrationId")) + if err != nil { + log.Error("Failed to read integration type: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list branches", + Debug: err.Error()}) + } + return + } + + repoId := getStringParam(r, "repositoryId") + + branches, err := c.service.ListBranchesAndTags(context.Create(r), integration, repoId, filter) + if err != nil { + log.Error("Failed to list branches: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to list branches", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, branches) +} diff --git a/qubership-apihub-service/controller/LogsController.go b/qubership-apihub-service/controller/LogsController.go new file mode 100644 index 0000000..08cf3d0 --- /dev/null +++ b/qubership-apihub-service/controller/LogsController.go @@ -0,0 +1,135 @@ +// Copyright 2024-2025 NetCracker Technology 
Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" +) + +type LogsController interface { + StoreLogs(w http.ResponseWriter, r *http.Request) + SetLogLevel(w http.ResponseWriter, r *http.Request) + CheckLogLevel(w http.ResponseWriter, r *http.Request) +} + +func NewLogsController(logsService service.LogsService, roleService service.RoleService) LogsController { + return &logsControllerImpl{ + logsService: logsService, + roleService: roleService, + } +} + +type logsControllerImpl struct { + logsService service.LogsService + roleService service.RoleService +} + +func (l logsControllerImpl) StoreLogs(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var obj map[string]interface{} + err = json.Unmarshal(body, &obj) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + l.logsService.StoreLogs(obj) + w.WriteHeader(http.StatusOK) +} + +func (l logsControllerImpl) SetLogLevel(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + ctx := context.Create(r) + sufficientPrivileges := l.roleService.IsSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + type SetLevelReq struct { + Level log.Level `json:"level"` + } + var req SetLevelReq + err = json.Unmarshal(body, &req) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + log.SetLevel(req.Level) + log.Infof("Log level was set to %s", req.Level.String()) + w.WriteHeader(http.StatusOK) +} + +func (l logsControllerImpl) CheckLogLevel(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := l.roleService.IsSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + log.Error("Error level is enabled") + log.Warn("Warn level is enabled") + log.Info("Info level is enabled") + log.Debug("Debug level is enabled") + log.Trace("Trace level is enabled") + w.Write([]byte(fmt.Sprintf("Current log level is '%s'. 
See logs for details", log.GetLevel()))) +} diff --git a/qubership-apihub-service/controller/MinioStorageController.go b/qubership-apihub-service/controller/MinioStorageController.go new file mode 100644 index 0000000..6625801 --- /dev/null +++ b/qubership-apihub-service/controller/MinioStorageController.go @@ -0,0 +1,63 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type MinioStorageController interface { + DownloadFilesFromMinioToDatabase(w http.ResponseWriter, r *http.Request) +} + +func NewMinioStorageController(minioCreds *view.MinioStorageCreds, minioStorageService service.MinioStorageService) MinioStorageController { + return &minioStorageControllerImpl{ + minioStorageService: minioStorageService, + minioCreds: minioCreds, + } +} + +type minioStorageControllerImpl struct { + minioStorageService service.MinioStorageService + minioCreds *view.MinioStorageCreds +} + +func (m minioStorageControllerImpl) DownloadFilesFromMinioToDatabase(w http.ResponseWriter, r *http.Request) { + if !m.minioCreds.IsActive { + RespondWithCustomError(w, &exception.CustomError{ + 
Status: http.StatusMethodNotAllowed, + Message: "Minio integration is inactive. Please check envs for configuration"}) + return + } + err := m.minioStorageService.DownloadFilesFromBucketToDatabase() + if err != nil { + log.Error("Failed to download data from minio: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to download data from minio", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusAccepted) +} diff --git a/qubership-apihub-service/controller/OperationController.go b/qubership-apihub-service/controller/OperationController.go new file mode 100644 index 0000000..a694135 --- /dev/null +++ b/qubership-apihub-service/controller/OperationController.go @@ -0,0 +1,1363 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type OperationController interface { + GetOperationList(w http.ResponseWriter, r *http.Request) + GetOperation(w http.ResponseWriter, r *http.Request) + GetOperationsTags(w http.ResponseWriter, r *http.Request) + GetOperationChanges(w http.ResponseWriter, r *http.Request) + GetOperationsChanges_deprecated(w http.ResponseWriter, r *http.Request) + GetOperationsChanges(w http.ResponseWriter, r *http.Request) + GetDeprecatedOperationsList(w http.ResponseWriter, r *http.Request) + GetOperationDeprecatedItems(w http.ResponseWriter, r *http.Request) + GetDeprecatedOperationsSummary(w http.ResponseWriter, r *http.Request) + GetOperationModelUsages(w http.ResponseWriter, r *http.Request) +} + +func NewOperationController(roleService service.RoleService, + operationService service.OperationService, + buildService service.BuildService, + monitoringService service.MonitoringService, + ptHandler service.PackageTransitionHandler) OperationController { + return &operationControllerImpl{ + roleService: roleService, + operationService: operationService, + buildService: buildService, + monitoringService: monitoringService, + ptHandler: ptHandler, + } +} + +type operationControllerImpl struct { + roleService service.RoleService + operationService service.OperationService + buildService service.BuildService + monitoringService service.MonitoringService + ptHandler service.PackageTransitionHandler +} + +func (o operationControllerImpl) GetOperationList(w http.ResponseWriter, r *http.Request) { + 
packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + kind, err := url.QueryUnescape(r.URL.Query().Get("kind")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + apiAudience := r.URL.Query().Get("apiAudience") + if apiAudience == "all" { + apiAudience = "" + } + if 
apiAudience != "" && !view.ValidApiAudience(apiAudience) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiAudience", "value": apiAudience}, + }) + return + } + + tag, err := url.QueryUnescape(r.URL.Query().Get("tag")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "tag"}, + Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + var deprecated *bool + deprecatedStr := r.URL.Query().Get("deprecated") + if deprecatedStr != "" { + deprecatedBool, err := strconv.ParseBool(deprecatedStr) + if err == nil { + deprecated = &deprecatedBool + } + } + + hashList, err := getListFromParam(r, "hashList") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "hashList"}, + Debug: err.Error(), + }) + return + } + + ids, err := getListFromParam(r, "ids") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: 
map[string]interface{}{"param": "ids"}, + Debug: err.Error(), + }) + return + } + + includeData := false + if r.URL.Query().Get("includeData") != "" { + includeData, err = strconv.ParseBool(r.URL.Query().Get("includeData")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeData", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + emptyTag := false + if r.URL.Query().Get("emptyTag") != "" { + emptyTag, err = strconv.ParseBool(r.URL.Query().Get("emptyTag")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "emptyTag", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + if emptyTag { + tag = "" + } + + skipRefs := false + if r.URL.Query().Get("skipRefs") != "" { + skipRefs, err = strconv.ParseBool(r.URL.Query().Get("skipRefs")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "skipRefs", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + documentSlug, err := url.QueryUnescape(r.URL.Query().Get("documentSlug")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "documentSlug"}, + Debug: err.Error(), + }) + return + } + + emptyGroup := false + if r.URL.Query().Get("emptyGroup") != "" { + emptyGroup, err = strconv.ParseBool(r.URL.Query().Get("emptyGroup")) + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "emptyGroup", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + group := r.URL.Query().Get("group") + if emptyGroup && group != "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.OverlappingQueryParameter, + Message: exception.OverlappingQueryParameterMsg, + Params: map[string]interface{}{"param1": "emptyGroup", "param2": "group"}, + }) + return + } + refPackageId, err := url.QueryUnescape(r.URL.Query().Get("refPackageId")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "refPackageId"}, + Debug: err.Error(), + }) + return + } + + restOperationListReq := view.OperationListReq{ + Deprecated: deprecated, + HashList: hashList, + Ids: ids, + IncludeData: includeData, + Kind: kind, + EmptyTag: emptyTag, + Tag: tag, + Limit: limit, + Page: page, + TextFilter: textFilter, + ApiType: apiType, + DocumentSlug: documentSlug, + EmptyGroup: emptyGroup, + Group: group, + RefPackageId: refPackageId, + ApiAudience: apiAudience, + } + + operations, err := o.operationService.GetOperations(packageId, versionName, skipRefs, restOperationListReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations", err) + return + } + RespondWithJson(w, http.StatusOK, operations) +} + +func (o operationControllerImpl) GetOperation(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, 
packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + operationId, err := getUnescapedStringParam(r, "operationId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "operationId"}, + Debug: err.Error(), + }) + return + } + + o.monitoringService.AddOperationOpenCount(packageId, versionName, operationId) + + basicSearchFilter := view.OperationBasicSearchReq{ + PackageId: packageId, + Version: versionName, + ApiType: apiType, + OperationId: operationId, + } + + operation, err := o.operationService.GetOperation(basicSearchFilter) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operation", err) + return + } + RespondWithJson(w, http.StatusOK, operation) +} + +func (o operationControllerImpl) GetOperationsTags(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := 
o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParamWithIncreasedMax(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + kind, 
err := url.QueryUnescape(r.URL.Query().Get("kind")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + apiAudience := r.URL.Query().Get("apiAudience") + if apiAudience == "all" { + apiAudience = "" + } + if apiAudience != "" && !view.ValidApiAudience(apiAudience) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiAudience", "value": apiAudience}, + }) + return + } + skipRefs := false + if r.URL.Query().Get("skipRefs") != "" { + skipRefs, err = strconv.ParseBool(r.URL.Query().Get("skipRefs")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "skipRefs", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + basicSearchFilter := view.OperationBasicSearchReq{ + PackageId: packageId, + Version: versionName, + ApiType: apiType, + ApiKind: kind, + Limit: limit, + Offset: limit * page, + TextFilter: textFilter, + ApiAudience: apiAudience, + } + + tags, err := o.operationService.GetOperationsTags(basicSearchFilter, skipRefs) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations tags", err) + return + } + RespondWithJson(w, http.StatusOK, tags) +} + +func (o operationControllerImpl) GetOperationChanges(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + 
handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + operationId, err := getUnescapedStringParam(r, "operationId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "operationId"}, + Debug: err.Error(), + }) + return + } + + previousVersion := r.URL.Query().Get("previousVersion") + previousVersionPackageId := r.URL.Query().Get("previousVersionPackageId") + severities, err := getListFromParam(r, "severity") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "severity"}, + Debug: err.Error(), + }) + return + } + for _, severity := range severities { + if !view.ValidSeverity(severity) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "severity", "value": severity}, + }) + return + } + } + changes, err := o.operationService.GetOperationChanges(packageId, versionName, operationId, previousVersionPackageId, previousVersion, severities) + if err != nil { + 
// GetOperationsChanges_deprecated returns the changelog between the requested
// package version and a previous version for the given API type, filtered by
// the request's query parameters (apiKind, tag/emptyTag, group/emptyGroup,
// severity, textFilter, paging via limit/page).
//
// Deprecated: use GetOperationsChanges instead; the newer handler additionally
// supports the apiAudience filter.
func (o operationControllerImpl) GetOperationsChanges_deprecated(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	// Read permission on the package is required before any data is returned.
	sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiType"},
			Debug:   err.Error(),
		})
		return
	}
	limit, customError := getLimitQueryParam(r)
	if customError != nil {
		RespondWithCustomError(w, customError)
		return
	}
	// Zero-based page index; combined with limit into an offset below.
	page := 0
	if r.URL.Query().Get("page") != "" {
		page, err = strconv.Atoi(r.URL.Query().Get("page"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "page", "type": "int"},
				Debug:   err.Error(),
			})
			return
		}
	}
	textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "textFilter"},
			Debug:   err.Error(),
		})
		return
	}
	apiKind := r.URL.Query().Get("apiKind")
	documentSlug := r.URL.Query().Get("documentSlug")
	refPackageId := r.URL.Query().Get("refPackageId")
	previousVersion := r.URL.Query().Get("previousVersion")
	previousVersionPackageId := r.URL.Query().Get("previousVersionPackageId")

	emptyTag := false
	emptyTagStr := r.URL.Query().Get("emptyTag")
	if emptyTagStr != "" {
		emptyTag, err = strconv.ParseBool(emptyTagStr)
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyTag", "type": "bool"},
				Debug:   err.Error(),
			})
			return
		}
	}
	// The tag list filter only applies when emptyTag was not requested.
	tags := make([]string, 0)
	if !emptyTag {
		tags, err = getListFromParam(r, "tag")
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.InvalidURLEscape,
				Message: exception.InvalidURLEscapeMsg,
				Params:  map[string]interface{}{"param": "tag"},
				Debug:   err.Error(),
			})
			return
		}
	}
	emptyGroup := false
	if r.URL.Query().Get("emptyGroup") != "" {
		emptyGroup, err = strconv.ParseBool(r.URL.Query().Get("emptyGroup"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyGroup", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	group := r.URL.Query().Get("group")
	// emptyGroup and group are mutually exclusive filters.
	if emptyGroup && group != "" {
		RespondWithCustomError(w,
			&exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.OverlappingQueryParameter,
				Message: exception.OverlappingQueryParameterMsg,
				Params:  map[string]interface{}{"param1": "emptyGroup", "param2": "group"},
			})
		return
	}

	severities := make([]string, 0)
	severities, err = getListFromParam(r, "severity")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "severity"},
			Debug:   err.Error(),
		})
		return
	}
	// Each provided severity must be one of the values accepted by
	// view.ValidSeverity; otherwise the request is rejected.
	if len(severities) > 0 {
		for _, severity := range severities {
			if !view.ValidSeverity(severity) {
				RespondWithCustomError(w, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidParameterValue,
					Message: exception.InvalidParameterValueMsg,
					Params:  map[string]interface{}{"param": "severity", "value": severity},
				})
				return
			}
		}
	}

	versionChangesSearchReq := view.VersionChangesReq{
		PreviousVersion:          previousVersion,
		PreviousVersionPackageId: previousVersionPackageId,
		DocumentSlug:             documentSlug,
		ApiKind:                  apiKind,
		EmptyTag:                 emptyTag,
		RefPackageId:             refPackageId,
		Tags:                     tags,
		TextFilter:               textFilter,
		Limit:                    limit,
		Offset:                   limit * page,
		EmptyGroup:               emptyGroup,
		Group:                    group,
		Severities:               severities,
	}

	changelog, err := o.operationService.GetVersionChanges_deprecated(packageId, versionName, apiType, versionChangesSearchReq)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations changelog", err)
		return
	}
	RespondWithJson(w, http.StatusOK, changelog)
}
// GetOperationsChanges returns the changelog between the requested package
// version and a previous version for the given API type. It supports the same
// filters as the deprecated variant plus apiAudience, and delegates to
// operationService.GetVersionChanges.
func (o operationControllerImpl) GetOperationsChanges(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	// Read permission on the package is required before any data is returned.
	sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiType"},
			Debug:   err.Error(),
		})
		return
	}
	limit, customError := getLimitQueryParam(r)
	if customError != nil {
		RespondWithCustomError(w, customError)
		return
	}
	// Zero-based page index; combined with limit into an offset below.
	page := 0
	if r.URL.Query().Get("page") != "" {
		page, err = strconv.Atoi(r.URL.Query().Get("page"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "page", "type": "int"},
				Debug:   err.Error(),
			})
			return
		}
	}
	textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "textFilter"},
			Debug:   err.Error(),
		})
		return
	}
	apiKind := r.URL.Query().Get("apiKind")
	// "all" is treated as "no audience filter"; any other non-empty value must
	// pass view.ValidApiAudience.
	apiAudience := r.URL.Query().Get("apiAudience")
	if apiAudience == "all" {
		apiAudience = ""
	}
	if apiAudience != "" && !view.ValidApiAudience(apiAudience) {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameterValue,
			Message: exception.InvalidParameterValueMsg,
			Params:  map[string]interface{}{"param": "apiAudience", "value": apiAudience},
		})
		return
	}
	documentSlug := r.URL.Query().Get("documentSlug")
	refPackageId := r.URL.Query().Get("refPackageId")
	previousVersion := r.URL.Query().Get("previousVersion")
	previousVersionPackageId := r.URL.Query().Get("previousVersionPackageId")

	emptyTag := false
	emptyTagStr := r.URL.Query().Get("emptyTag")
	if emptyTagStr != "" {
		emptyTag, err = strconv.ParseBool(emptyTagStr)
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyTag", "type": "bool"},
				Debug:   err.Error(),
			})
			return
		}
	}
	// The tag list filter only applies when emptyTag was not requested.
	tags := make([]string, 0)
	if !emptyTag {
		tags, err = getListFromParam(r, "tag")
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.InvalidURLEscape,
				Message: exception.InvalidURLEscapeMsg,
				Params:  map[string]interface{}{"param": "tag"},
				Debug:   err.Error(),
			})
			return
		}
	}
	emptyGroup := false
	if r.URL.Query().Get("emptyGroup") != "" {
		emptyGroup, err = strconv.ParseBool(r.URL.Query().Get("emptyGroup"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyGroup", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	group := r.URL.Query().Get("group")
	// emptyGroup and group are mutually exclusive filters.
	if emptyGroup && group != "" {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OverlappingQueryParameter,
			Message: exception.OverlappingQueryParameterMsg,
			Params:  map[string]interface{}{"param1": "emptyGroup", "param2": "group"},
		})
		return
	}

	severities := make([]string, 0)
	severities, err = getListFromParam(r, "severity")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "severity"},
			Debug:   err.Error(),
		})
		return
	}
	// Each provided severity must be one of the values accepted by
	// view.ValidSeverity; otherwise the request is rejected.
	if len(severities) > 0 {
		for _, severity := range severities {
			if !view.ValidSeverity(severity) {
				RespondWithCustomError(w, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidParameterValue,
					Message: exception.InvalidParameterValueMsg,
					Params:  map[string]interface{}{"param": "severity", "value": severity},
				})
				return
			}
		}
	}

	versionChangesSearchReq := view.VersionChangesReq{
		PreviousVersion:          previousVersion,
		PreviousVersionPackageId: previousVersionPackageId,
		DocumentSlug:             documentSlug,
		ApiKind:                  apiKind,
		EmptyTag:                 emptyTag,
		RefPackageId:             refPackageId,
		Tags:                     tags,
		TextFilter:               textFilter,
		Limit:                    limit,
		Offset:                   limit * page,
		EmptyGroup:               emptyGroup,
		Group:                    group,
		Severities:               severities,
		ApiAudience:              apiAudience,
	}

	changelog, err := o.operationService.GetVersionChanges(packageId, versionName, apiType, versionChangesSearchReq)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations changelog", err)
		return
	}
	RespondWithJson(w, http.StatusOK, changelog)
}
// GetDeprecatedOperationsList returns the deprecated operations of a package
// version, filtered by the request's query parameters (apiKind, apiAudience,
// tag/emptyTag, group/emptyGroup, ids, documentSlug, refPackageId, textFilter,
// paging via limit/page). It also records a business metric for the call.
func (o operationControllerImpl) GetDeprecatedOperationsList(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	// Read permission on the package is required before any data is returned.
	sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
		return
	}

	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiType"},
			Debug:   err.Error(),
		})
		return
	}

	textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "textFilter"},
			Debug:   err.Error(),
		})
		return
	}

	kind, err := url.QueryUnescape(r.URL.Query().Get("apiKind"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiKind"},
			Debug:   err.Error(),
		})
		return
	}
	// "all" is treated as "no audience filter"; any other non-empty value must
	// pass view.ValidApiAudience.
	apiAudience := r.URL.Query().Get("apiAudience")
	if apiAudience == "all" {
		apiAudience = ""
	}
	if apiAudience != "" && !view.ValidApiAudience(apiAudience) {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameterValue,
			Message: exception.InvalidParameterValueMsg,
			Params:  map[string]interface{}{"param": "apiAudience", "value": apiAudience},
		})
		return
	}

	tags := make([]string, 0)
	tags, err = getListFromParam(r, "tag")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "tag"},
			Debug:   err.Error(),
		})
		return
	}

	limit, customError := getLimitQueryParam(r)
	if customError != nil {
		RespondWithCustomError(w, customError)
		return
	}

	// Zero-based page index; passed through to the service as-is.
	page := 0
	if r.URL.Query().Get("page") != "" {
		page, err = strconv.Atoi(r.URL.Query().Get("page"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "page", "type": "int"},
				Debug:   err.Error(),
			})
			return
		}
	}

	ids, err := getListFromParam(r, "ids")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "ids"},
			Debug:   err.Error(),
		})
		return
	}

	includeDeprecatedItems := false
	if r.URL.Query().Get("includeDeprecatedItems") != "" {
		includeDeprecatedItems, err = strconv.ParseBool(r.URL.Query().Get("includeDeprecatedItems"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "includeDeprecatedItems", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}

	documentSlug, err := url.QueryUnescape(r.URL.Query().Get("documentSlug"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "documentSlug"},
			Debug:   err.Error(),
		})
		return
	}

	refPackageId, err := url.QueryUnescape(r.URL.Query().Get("refPackageId"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "refPackageId"},
			Debug:   err.Error(),
		})
		return
	}

	emptyTag := false
	if r.URL.Query().Get("emptyTag") != "" {
		emptyTag, err = strconv.ParseBool(r.URL.Query().Get("emptyTag"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyTag", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	emptyGroup := false
	if r.URL.Query().Get("emptyGroup") != "" {
		emptyGroup, err = strconv.ParseBool(r.URL.Query().Get("emptyGroup"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyGroup", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	group := r.URL.Query().Get("group")
	// emptyGroup and group are mutually exclusive filters.
	if emptyGroup && group != "" {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OverlappingQueryParameter,
			Message: exception.OverlappingQueryParameterMsg,
			Params:  map[string]interface{}{"param1": "emptyGroup", "param2": "group"},
		})
		return
	}

	// Record that this user queried deprecated operations for the package.
	o.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.DeprecatedOperationsCalled, packageId)

	deprecatedOperationListReq := view.DeprecatedOperationListReq{
		Ids:                    ids,
		Kind:                   kind,
		Tags:                   tags,
		Limit:                  limit,
		Page:                   page,
		TextFilter:             textFilter,
		ApiType:                apiType,
		DocumentSlug:           documentSlug,
		IncludeDeprecatedItems: includeDeprecatedItems,
		RefPackageId:           refPackageId,
		EmptyTag:               emptyTag,
		EmptyGroup:             emptyGroup,
		Group:                  group,
		ApiAudience:            apiAudience,
	}

	operations, err := o.operationService.GetDeprecatedOperations(packageId, versionName, deprecatedOperationListReq)
	if err != nil {
		handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations", err)
		return
	}
	RespondWithJson(w, http.StatusOK, operations)
}
Tags: tags, + Limit: limit, + Page: page, + TextFilter: textFilter, + ApiType: apiType, + DocumentSlug: documentSlug, + IncludeDeprecatedItems: includeDeprecatedItems, + RefPackageId: refPackageId, + EmptyTag: emptyTag, + EmptyGroup: emptyGroup, + Group: group, + ApiAudience: apiAudience, + } + + operations, err := o.operationService.GetDeprecatedOperations(packageId, versionName, deprecatedOperationListReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operations", err) + return + } + RespondWithJson(w, http.StatusOK, operations) +} + +func (o operationControllerImpl) GetOperationDeprecatedItems(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + operationId, err := getUnescapedStringParam(r, "operationId") + if err != nil { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "operationId"}, + Debug: err.Error(), + }) + return + } + + o.monitoringService.AddOperationOpenCount(packageId, versionName, operationId) + + basicSearchFilter := view.OperationBasicSearchReq{ + PackageId: packageId, + Version: versionName, + ApiType: apiType, + OperationId: operationId, + } + + operationDeprecatedItems, err := o.operationService.GetOperationDeprecatedItems(basicSearchFilter) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operation deprecated items", err) + return + } + RespondWithJson(w, http.StatusOK, operationDeprecatedItems) +} + +func (o operationControllerImpl) GetDeprecatedOperationsSummary(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + deprecatedOperationsSummary, err := o.operationService.GetDeprecatedOperationsSummary(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, 
"Failed to get operation deprecated summary", err) + return + } + RespondWithJson(w, http.StatusOK, deprecatedOperationsSummary) + +} + +func (o operationControllerImpl) GetOperationModelUsages(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + operationId, err := getUnescapedStringParam(r, "operationId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "operationId"}, + Debug: err.Error(), + }) + return + } + modelName, err := getUnescapedStringParam(r, "modelName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: 
exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "modelName"}, + Debug: err.Error(), + }) + return + } + modelUsages, err := o.operationService.GetOperationModelUsages(packageId, version, apiType, operationId, modelName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, o.ptHandler, packageId, "Failed to get operation model usages", err) + return + } + RespondWithJson(w, http.StatusOK, modelUsages) +} diff --git a/qubership-apihub-service/controller/OperationGroupController.go b/qubership-apihub-service/controller/OperationGroupController.go new file mode 100644 index 0000000..bb2f01a --- /dev/null +++ b/qubership-apihub-service/controller/OperationGroupController.go @@ -0,0 +1,1312 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type OperationGroupController interface { + GetGroupedOperations(w http.ResponseWriter, r *http.Request) + GetGroupedOperationGhosts_deprecated(w http.ResponseWriter, r *http.Request) + CreateOperationGroup_deprecated(w http.ResponseWriter, r *http.Request) + CreateOperationGroup(w http.ResponseWriter, r *http.Request) + DeleteOperationGroup(w http.ResponseWriter, r *http.Request) + ReplaceOperationGroup_deprecated(w http.ResponseWriter, r *http.Request) + ReplaceOperationGroup(w http.ResponseWriter, r *http.Request) + UpdateOperationGroup_deprecated(w http.ResponseWriter, r *http.Request) + UpdateOperationGroup(w http.ResponseWriter, r *http.Request) + GetGroupExportTemplate(w http.ResponseWriter, r *http.Request) + StartOperationGroupPublish(w http.ResponseWriter, r *http.Request) + GetOperationGroupPublishStatus(w http.ResponseWriter, r *http.Request) +} + +func NewOperationGroupController(roleService service.RoleService, operationGroupService service.OperationGroupService, versionService service.VersionService) OperationGroupController { + return &operationGroupControllerImpl{ + roleService: roleService, + operationGroupService: operationGroupService, + versionService: versionService, + } +} + +type operationGroupControllerImpl struct { + roleService service.RoleService + operationGroupService service.OperationGroupService + versionService service.VersionService +} + +func (o operationGroupControllerImpl) 
// GetGroupedOperations returns the operations that belong to a named group of
// a package version, filtered by the request's query parameters (kind,
// apiAudience, tag/emptyTag, deprecated, documentSlug, refPackageId,
// onlyAddable, textFilter, paging via limit/page).
func (o operationGroupControllerImpl) GetGroupedOperations(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	// Read permission on the package is required before any data is returned.
	sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission)
	if err != nil {
		RespondWithError(w, "Failed to check user privileges", err)
		return
	}
	if !sufficientPrivileges {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		})
		return
	}
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
		return
	}
	apiType, err := getUnescapedStringParam(r, "apiType")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "apiType"},
			Debug:   err.Error(),
		})
		return
	}
	// apiType must parse to a known API type even though only the raw string
	// is forwarded below.
	_, err = view.ParseApiType(apiType)
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameterValue,
			Message: exception.InvalidParameterValueMsg,
			Params:  map[string]interface{}{"param": "apiType", "value": apiType},
			Debug:   err.Error(),
		})
		return
	}
	groupName, err := getUnescapedStringParam(r, "groupName")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "groupName"},
			Debug:   err.Error(),
		})
		return
	}

	textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "textFilter"},
			Debug:   err.Error(),
		})
		return
	}

	kind, err := url.QueryUnescape(r.URL.Query().Get("kind"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "kind"},
			Debug:   err.Error(),
		})
		return
	}
	// "all" is treated as "no audience filter"; any other non-empty value must
	// pass view.ValidApiAudience.
	apiAudience := r.URL.Query().Get("apiAudience")
	if apiAudience == "all" {
		apiAudience = ""
	}
	if apiAudience != "" && !view.ValidApiAudience(apiAudience) {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidParameterValue,
			Message: exception.InvalidParameterValueMsg,
			Params:  map[string]interface{}{"param": "apiAudience", "value": apiAudience},
		})
		return
	}
	tag, err := url.QueryUnescape(r.URL.Query().Get("tag"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "tag"},
			Debug:   err.Error(),
		})
		return
	}

	limit, customError := getLimitQueryParamWithIncreasedMax(r)
	if customError != nil {
		RespondWithCustomError(w, customError)
		return
	}

	page := 0
	if r.URL.Query().Get("page") != "" {
		page, err = strconv.Atoi(r.URL.Query().Get("page"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "page", "type": "int"},
				Debug:   err.Error(),
			})
			return
		}
	}
	// Tri-state deprecated filter: nil means "no filter".
	// NOTE(review): a malformed "deprecated" value is silently ignored here
	// rather than rejected with a 400 like the other boolean parameters —
	// confirm this leniency is intentional.
	var deprecated *bool
	deprecatedStr := r.URL.Query().Get("deprecated")
	if deprecatedStr != "" {
		deprecatedBool, err := strconv.ParseBool(deprecatedStr)
		if err == nil {
			deprecated = &deprecatedBool
		}
	}

	emptyTag := false
	if r.URL.Query().Get("emptyTag") != "" {
		emptyTag, err = strconv.ParseBool(r.URL.Query().Get("emptyTag"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "emptyTag", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}
	// emptyTag overrides any explicit tag filter.
	if emptyTag {
		tag = ""
	}

	documentSlug, err := url.QueryUnescape(r.URL.Query().Get("documentSlug"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "documentSlug"},
			Debug:   err.Error(),
		})
		return
	}

	refPackageId, err := url.QueryUnescape(r.URL.Query().Get("refPackageId"))
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "refPackageId"},
			Debug:   err.Error(),
		})
		return
	}
	onlyAddable := false
	if r.URL.Query().Get("onlyAddable") != "" {
		onlyAddable, err = strconv.ParseBool(r.URL.Query().Get("onlyAddable"))
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectParamType,
				Message: exception.IncorrectParamTypeMsg,
				Params:  map[string]interface{}{"param": "onlyAddable", "type": "boolean"},
				Debug:   err.Error(),
			})
			return
		}
	}

	groupedOperationListReq := view.OperationListReq{
		Deprecated:   deprecated,
		Kind:         kind,
		EmptyTag:     emptyTag,
		Tag:          tag,
		Limit:        limit,
		Page:         page,
		TextFilter:   textFilter,
		ApiType:      apiType,
		DocumentSlug: documentSlug,
		RefPackageId: refPackageId,
		OnlyAddable:  onlyAddable,
		ApiAudience:  apiAudience,
	}

	groupedOperations, err := o.operationGroupService.GetGroupedOperations(packageId, versionName, apiType, groupName, groupedOperationListReq)
	if err != nil {
		RespondWithError(w, "Failed to get operations from group", err)
		return
	}
	RespondWithJson(w, http.StatusOK, groupedOperations)
}

// GetGroupedOperationGhosts_deprecated always responds with an empty ghost
// list and an empty package map; the endpoint is kept only so old clients
// receive a well-formed 200 response.
//
// Deprecated: returns no data by design.
func (o operationGroupControllerImpl) GetGroupedOperationGhosts_deprecated(w http.ResponseWriter, r *http.Request) {
	RespondWithJson(w, http.StatusOK, view.GroupedGhostOperations_deprecated{
		GhostOperations: []interface{}{},
		Packages:        map[string]view.PackageVersionRef{},
	})
}
user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := io.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var createOperationGroupReq view.CreateOperationGroupReq_deprecated + err = json.Unmarshal(body, &createOperationGroupReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(createOperationGroupReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = o.operationGroupService.CreateOperationGroup_deprecated(packageId, versionName, apiType, createOperationGroupReq) + if err != nil { + RespondWithError(w, "Failed to create operation group", err) + return + } + w.WriteHeader(http.StatusCreated) +} + +func (o operationGroupControllerImpl) CreateOperationGroup(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: 
map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(0) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + createOperationGroupReq := view.CreateOperationGroupReq{} + createOperationGroupReq.GroupName = r.FormValue("groupName") + createOperationGroupReq.Description = r.FormValue("description") + template, templateFileHeader, err := r.FormFile("template") + if 
err != http.ErrMissingFile {
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectMultipartFile,
				Message: exception.IncorrectMultipartFileMsg,
				Debug:   err.Error()})
			return
		}
		templateData, err := io.ReadAll(template)
		closeErr := template.Close()
		if closeErr != nil {
			// Fix: log the Close() error itself, not the unrelated ReadAll error.
			log.Debugf("failed to close temporal file: %+v", closeErr)
		}
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectMultipartFile,
				Message: exception.IncorrectMultipartFileMsg,
				Debug:   err.Error()})
			return
		}
		createOperationGroupReq.Template = templateData
		createOperationGroupReq.TemplateFilename = templateFileHeader.Filename
	} else if r.FormValue("template") != "" {
		// "template" was sent as a plain form value instead of a file part.
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidGroupExportTemplateType,
			Message: exception.InvalidGroupExportTemplateTypeMsg,
		})
		return
	}

	validationErr := utils.ValidateObject(createOperationGroupReq)
	if validationErr != nil {
		if customError, ok := validationErr.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
			return
		}
	}

	err = o.operationGroupService.CreateOperationGroup(ctx, packageId, versionName, apiType, createOperationGroupReq)
	if err != nil {
		RespondWithError(w, "Failed to create operation group", err)
		return
	}
	w.WriteHeader(http.StatusCreated)
}

// DeleteOperationGroup removes a manually created operation group from a package version.
// Requires the manage-version permission for the version's current status.
func (o operationGroupControllerImpl) DeleteOperationGroup(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
	ctx := context.Create(r)
	versionName, err := getUnescapedStringParam(r, "version")
	if err != nil {
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidURLEscape,
			Message: exception.InvalidURLEscapeMsg,
			Params:  map[string]interface{}{"param": "version"},
			Debug:   err.Error(),
		})
return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = o.operationGroupService.DeleteOperationGroup(ctx, packageId, versionName, apiType, groupName) + if err != nil { + RespondWithError(w, "Failed to delete operation group", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (o operationGroupControllerImpl) ReplaceOperationGroup_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := 
getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + 
defer r.Body.Close() + body, err := io.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var replaceOperationGroupReq view.ReplaceOperationGroupReq_deprecated + err = json.Unmarshal(body, &replaceOperationGroupReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(replaceOperationGroupReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = o.operationGroupService.ReplaceOperationGroup_deprecated(packageId, versionName, apiType, groupName, replaceOperationGroupReq) + if err != nil { + RespondWithError(w, "Failed to update operation group", err) + return + } + w.WriteHeader(http.StatusOK) +} + +func (o operationGroupControllerImpl) ReplaceOperationGroup(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != 
nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(0) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + replaceOperationGroupReq := view.ReplaceOperationGroupReq{} + replaceOperationGroupReq.GroupName = r.FormValue("groupName") + replaceOperationGroupReq.Description = r.FormValue("description") + template, templateFileHeader, err := r.FormFile("template") + if err != http.ErrMissingFile { + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest,
				Code:    exception.IncorrectMultipartFile,
				Message: exception.IncorrectMultipartFileMsg,
				Debug:   err.Error()})
			return
		}
		templateData, err := io.ReadAll(template)
		closeErr := template.Close()
		if closeErr != nil {
			// Fix: log the Close() error itself, not the unrelated ReadAll error.
			log.Debugf("failed to close temporal file: %+v", closeErr)
		}
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.IncorrectMultipartFile,
				Message: exception.IncorrectMultipartFileMsg,
				Debug:   err.Error()})
			return
		}
		replaceOperationGroupReq.Template = templateData
		replaceOperationGroupReq.TemplateFilename = templateFileHeader.Filename
	} else if r.FormValue("template") != "" {
		// "template" was sent as a plain form value instead of a file part.
		RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidGroupExportTemplateType,
			Message: exception.InvalidGroupExportTemplateTypeMsg,
		})
		return
	}
	operationsArrStr := r.FormValue("operations")
	if operationsArrStr != "" {
		// "operations" arrives as a JSON-encoded array inside the multipart form.
		err = json.Unmarshal([]byte(operationsArrStr), &replaceOperationGroupReq.Operations)
		if err != nil {
			RespondWithCustomError(w, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.BadRequestBody,
				Message: exception.BadRequestBodyMsg,
				Debug:   fmt.Sprintf("failed to unmarshal operations field: %v", err.Error()),
			})
			return
		}
	}
	validationErr := utils.ValidateObject(replaceOperationGroupReq)
	if validationErr != nil {
		if customError, ok := validationErr.(*exception.CustomError); ok {
			RespondWithCustomError(w, customError)
			return
		}
	}
	err = o.operationGroupService.ReplaceOperationGroup(ctx, packageId, versionName, apiType, groupName, replaceOperationGroupReq)
	if err != nil {
		RespondWithError(w, "Failed to update operation group", err)
		return
	}
	w.WriteHeader(http.StatusOK)
}

// UpdateOperationGroup_deprecated is the legacy JSON-body variant of group update.
// Deprecated: use UpdateOperationGroup (multipart form) instead.
func (o operationGroupControllerImpl) UpdateOperationGroup_deprecated(w http.ResponseWriter, r *http.Request) {
	packageId := getStringParam(r, "packageId")
ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err 
:= io.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var updateOperationGroupReq view.UpdateOperationGroupReq_deprecated + err = json.Unmarshal(body, &updateOperationGroupReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + err = o.operationGroupService.UpdateOperationGroup_deprecated(packageId, versionName, apiType, groupName, updateOperationGroupReq) + if err != nil { + RespondWithError(w, "Failed to update operation group", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (o operationGroupControllerImpl) UpdateOperationGroup(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + 
Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + versionStatus, err := o.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + sufficientPrivileges, err := o.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(0) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + updateOperationGroupReq := view.UpdateOperationGroupReq{} + newGroupName := r.FormValue("groupName") + if newGroupName != "" { + updateOperationGroupReq.GroupName = &newGroupName + } + description := r.FormValue("description") + if description != "" { + updateOperationGroupReq.Description = &description + } + template, templateFileHeader, err := r.FormFile("template") + if err != http.ErrMissingFile { + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + 
templateData, err := io.ReadAll(template) + closeErr := template.Close() + if closeErr != nil { + log.Debugf("failed to close temporal file: %+v", err) + } + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + updateOperationGroupReq.Template = &view.OperationGroupTemplate{ + TemplateData: templateData, + TemplateFilename: templateFileHeader.Filename, + } + } else if r.Form.Has("template") { + if r.FormValue("template") != "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidGroupExportTemplateType, + Message: exception.InvalidGroupExportTemplateTypeMsg, + }) + return + } + updateOperationGroupReq.Template = &view.OperationGroupTemplate{ + TemplateData: nil, + TemplateFilename: "", + } + } + operationsArrStr := r.FormValue("operations") + if operationsArrStr != "" { + var operations []view.GroupOperations + err = json.Unmarshal([]byte(operationsArrStr), &operations) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: fmt.Sprintf("failed to unmarshal operations field: %v", err.Error()), + }) + return + } + updateOperationGroupReq.Operations = &operations + } + validationErr := utils.ValidateObject(updateOperationGroupReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = o.operationGroupService.UpdateOperationGroup(ctx, packageId, versionName, apiType, groupName, updateOperationGroupReq) + if err != nil { + RespondWithError(w, "Failed to update operation group", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (o operationGroupControllerImpl) GetGroupExportTemplate(w 
http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + template, templateFilename, err := o.operationGroupService.GetOperationGroupExportTemplate(packageId, versionName, 
apiType, groupName) + if err != nil { + RespondWithError(w, "Failed to get group export template", err) + return + } + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", templateFilename)) + w.WriteHeader(http.StatusOK) + w.Write(template) +} + +func (o operationGroupControllerImpl) StartOperationGroupPublish(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + //todo add support for different apiTypes when reducedSourceSpecifications is supported for them + if apiType != string(view.RestApiType) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnsupportedApiType, + Message: exception.UnsupportedApiTypeMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + }) + return + } + + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, 
view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := io.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.OperationGroupPublishReq + err = json.Unmarshal(body, &req) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(req) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + _, err = view.ParseVersionStatus(req.Status) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: err.Error(), + }) + return + } + sufficientPrivileges, err = o.roleService.HasManageVersionPermission(ctx, req.PackageId, req.Status) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishId, err := o.operationGroupService.StartOperationGroupPublish(ctx, packageId, version, apiType, groupName, req) + if err != nil { + RespondWithError(w, "Failed to start operation group publish process", err) + return + } + 
RespondWithJson(w, http.StatusAccepted, view.OperationGroupPublishResp{PublishId: publishId}) +} + +func (o operationGroupControllerImpl) GetOperationGroupPublishStatus(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + publishId := getStringParam(r, "publishId") + ctx := context.Create(r) + sufficientPrivileges, err := o.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishStatus, err := o.operationGroupService.GetOperationGroupPublishStatus(publishId) + if err != nil { + RespondWithError(w, "Failed to get operation group publish status", err) + return + } + RespondWithJson(w, http.StatusOK, publishStatus) +} diff --git a/qubership-apihub-service/controller/PackageController.go b/qubership-apihub-service/controller/PackageController.go new file mode 100644 index 0000000..68dcee8 --- /dev/null +++ b/qubership-apihub-service/controller/PackageController.go @@ -0,0 +1,550 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PackageController interface { + UpdatePackage(w http.ResponseWriter, r *http.Request) + CreatePackage(w http.ResponseWriter, r *http.Request) + DeletePackage(w http.ResponseWriter, r *http.Request) + DisfavorPackage(w http.ResponseWriter, r *http.Request) + FavorPackage(w http.ResponseWriter, r *http.Request) + GetPackage(w http.ResponseWriter, r *http.Request) + GetPackageStatus(w http.ResponseWriter, r *http.Request) + GetPackagesList(w http.ResponseWriter, r *http.Request) + GetAvailableVersionStatusesForPublish(w http.ResponseWriter, r *http.Request) + RecalculateOperationGroups(w http.ResponseWriter, r *http.Request) + CalculateOperationGroups(w http.ResponseWriter, r *http.Request) +} + +func NewPackageController(packageService service.PackageService, + versionService service.PublishedService, + portalService service.PortalService, + searchService service.SearchService, + roleService service.RoleService, + monitoringService service.MonitoringService, + ptHandler service.PackageTransitionHandler) PackageController { + return &packageControllerImpl{ + publishedService: versionService, + portalService: portalService, + searchService: searchService, + packageService: packageService, + roleService: roleService, + monitoringService: monitoringService, + ptHandler: 
ptHandler, + } +} + +type packageControllerImpl struct { + publishedService service.PublishedService + portalService service.PortalService + searchService service.SearchService + packageService service.PackageService + roleService service.RoleService + monitoringService service.MonitoringService + ptHandler service.PackageTransitionHandler +} + +func (p packageControllerImpl) DeletePackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.DeletePackagePermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err = p.packageService.DeletePackage(ctx, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to delete package", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (p packageControllerImpl) DisfavorPackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err = p.packageService.DisfavorPackage(ctx, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to 
remove group from favorites", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (p packageControllerImpl) FavorPackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err = p.packageService.FavorPackage(ctx, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to add package to favorites", err) + return + } + + w.WriteHeader(http.StatusNoContent) +} + +func (p packageControllerImpl) GetPackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + showParentsString := r.URL.Query().Get("showParents") + showParents, err := strconv.ParseBool(showParentsString) + + packageInfo, err := p.packageService.GetPackage(ctx, packageId, showParents) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to get package info", err) + return + } + RespondWithJson(w, http.StatusOK, packageInfo) +} + +func (p packageControllerImpl) 
GetPackageStatus(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + packageStatus, err := p.packageService.GetPackageStatus(packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to get package status", err) + return + } + RespondWithJson(w, http.StatusOK, packageStatus) +} + +func (p packageControllerImpl) GetPackagesList(w http.ResponseWriter, r *http.Request) { + var err error + filter := r.URL.Query().Get("textFilter") + parentId := r.URL.Query().Get("parentId") + kind, err := getListFromParam(r, "kind") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + onlyFavorite := false + if r.URL.Query().Get("onlyFavorite") != "" { + onlyFavorite, err = strconv.ParseBool(r.URL.Query().Get("onlyFavorite")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyFavorite", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + onlyShared := false + if r.URL.Query().Get("onlyShared") != "" { + onlyShared, err = strconv.ParseBool(r.URL.Query().Get("onlyShared")) + if err != nil { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyShared", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + showParents := false + if r.URL.Query().Get("showParents") != "" { + showParents, err = strconv.ParseBool(r.URL.Query().Get("showParents")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "showParents", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + lastReleaseVersionDetails := false + if r.URL.Query().Get("lastReleaseVersionDetails") != "" { + lastReleaseVersionDetails, err = strconv.ParseBool(r.URL.Query().Get("lastReleaseVersionDetails")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "lastReleaseVersionDetails", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + serviceName := r.URL.Query().Get("serviceName") + + showAllDescendants := false + if r.URL.Query().Get("showAllDescendants") != "" { + showAllDescendants, err = 
strconv.ParseBool(r.URL.Query().Get("showAllDescendants")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "showAllDescendants", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + packageListReq := view.PackageListReq{ + Kind: kind, + Limit: limit, + OnlyFavorite: onlyFavorite, + OnlyShared: onlyShared, + Offset: limit * page, + ParentId: parentId, + ShowParents: showParents, + TextFilter: filter, + LastReleaseVersionDetails: lastReleaseVersionDetails, + ServiceName: serviceName, + ShowAllDescendants: showAllDescendants, + } + + packages, err := p.packageService.GetPackagesList(context.Create(r), packageListReq) + + if err != nil { + RespondWithError(w, "Failed to get packages", err) + return + } + RespondWithJson(w, http.StatusOK, packages) +} + +func (p packageControllerImpl) CreatePackage(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var packg view.SimplePackage + err = json.Unmarshal(body, &packg) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(packg) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + ctx := context.Create(r) + var sufficientPrivileges bool + if packg.ParentId == "" { + sufficientPrivileges = p.roleService.IsSysadm(ctx) + } else { + sufficientPrivileges, err = 
p.roleService.HasRequiredPermissions(ctx, packg.ParentId, view.CreateAndUpdatePackagePermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + if !IsAcceptableAlias(packg.Alias) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasContainsForbiddenChars, + Message: exception.AliasContainsForbiddenCharsMsg, + }) + return + } + + if !strings.Contains(packg.ParentId, ".") && strings.ToLower(packg.Alias) == "runenv" && !p.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasContainsRunenvChars, + Message: exception.AliasContainsRunenvCharsMsg, + }) + return + } + + newPackage, err := p.packageService.CreatePackage(ctx, packg) + if err != nil { + RespondWithError(w, "Failed to create package", err) + return + } + if newPackage.ParentId != "" && (newPackage.Kind == entity.KIND_PACKAGE || newPackage.Kind == entity.KIND_DASHBOARD) { + p.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.PackagesAndDashboardsCreated, newPackage.ParentId) + } + + RespondWithJson(w, http.StatusCreated, newPackage) +} + +func (p packageControllerImpl) UpdatePackage(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.CreateAndUpdatePackagePermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: 
exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var patchPackage view.PatchPackageReq + + err = json.Unmarshal(body, &patchPackage) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + updatedPackage, err := p.packageService.UpdatePackage(ctx, &patchPackage, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to update Package info", err) + return + } + + RespondWithJson(w, http.StatusOK, updatedPackage) +} + +func (p packageControllerImpl) GetAvailableVersionStatusesForPublish(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + availableVersionStatusesForPublish, err := p.packageService.GetAvailableVersionPublishStatuses(ctx, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to get available version statuses for publish", err) + return + } + RespondWithJson(w, http.StatusOK, availableVersionStatusesForPublish) +} + +func (p packageControllerImpl) 
RecalculateOperationGroups(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err = p.packageService.RecalculateOperationGroups(ctx, packageId) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to recalculate operation groups", err) + return + } + + w.WriteHeader(http.StatusOK) +} + +func (p packageControllerImpl) CalculateOperationGroups(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + groupingPrefix, err := url.QueryUnescape(r.URL.Query().Get("groupingPrefix")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupingPrefix"}, + Debug: err.Error(), + }) + return + } + + groups, err := p.packageService.CalculateOperationGroups(packageId, groupingPrefix) + if err != nil { + 
handlePkgRedirectOrRespondWithError(w, r, p.ptHandler, packageId, "Failed to calculate operation groups", err) + return + } + + RespondWithJson(w, http.StatusOK, groups) +} diff --git a/qubership-apihub-service/controller/PlaygroundProxyController.go b/qubership-apihub-service/controller/PlaygroundProxyController.go new file mode 100644 index 0000000..42049a0 --- /dev/null +++ b/qubership-apihub-service/controller/PlaygroundProxyController.go @@ -0,0 +1,97 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "crypto/tls" + "io" + "net/http" + "net/url" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" +) + +func NewPlaygroundProxyController(systemInfoService service.SystemInfoService) ProxyController { + return &playgroundProxyControllerImpl{ + tr: http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, + systemInfoService: systemInfoService} +} + +type playgroundProxyControllerImpl struct { + tr http.Transport + systemInfoService service.SystemInfoService +} + +const CustomProxyUrlHeader = "X-Apihub-Proxy-Url" + +func (p *playgroundProxyControllerImpl) Proxy(w http.ResponseWriter, r *http.Request) { + proxyUrlStr := r.Header.Get(CustomProxyUrlHeader) + if proxyUrlStr == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": CustomProxyUrlHeader}, + }) + return + } + r.Header.Del(CustomProxyUrlHeader) + proxyURL, err := url.Parse(proxyUrlStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURL, + Message: exception.InvalidURLMsg, + Params: map[string]interface{}{"url": proxyUrlStr}, + Debug: err.Error(), + }) + return + } + var validHost bool + for _, host := range p.systemInfoService.GetAllowedHosts() { + if strings.Contains(proxyURL.Host, host) { + validHost = true + break + } + } + if !validHost { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.HostNotAllowed, + Message: exception.HostNotAllowedMsg, + Params: map[string]interface{}{"host": proxyUrlStr}, + }) + return + } + r.URL = proxyURL + r.Host = proxyURL.Host + resp, err := p.tr.RoundTrip(r) + if err != nil { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.ProxyFailed, + Message: exception.ProxyFailedMsg, + Params: map[string]interface{}{"url": r.URL.String()}, + Debug: err.Error(), + }) + return + } + defer resp.Body.Close() + copyHeader(w.Header(), resp.Header) + w.WriteHeader(resp.StatusCode) + io.Copy(w, resp.Body) +} diff --git a/qubership-apihub-service/controller/ProjectController.go b/qubership-apihub-service/controller/ProjectController.go new file mode 100644 index 0000000..b312336 --- /dev/null +++ b/qubership-apihub-service/controller/ProjectController.go @@ -0,0 +1,327 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type ProjectController interface { + AddProject(w http.ResponseWriter, r *http.Request) + GetProject(w http.ResponseWriter, r *http.Request) + GetFilteredProjects(w http.ResponseWriter, r *http.Request) + UpdateProject(w http.ResponseWriter, r *http.Request) + DeleteProject(w http.ResponseWriter, r *http.Request) + FavorProject(w http.ResponseWriter, r *http.Request) + DisfavorProject(w http.ResponseWriter, r *http.Request) +} + +func NewProjectController(projectService service.ProjectService, groupService service.GroupService, searchService service.SearchService) ProjectController { + return &projectControllerImpl{ + projectService: projectService, + groupService: groupService, + searchService: searchService} +} + +type projectControllerImpl struct { + projectService service.ProjectService + groupService service.GroupService + searchService service.SearchService +} + +func (p projectControllerImpl) AddProject(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var project view.Project + err = json.Unmarshal(body, &project) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + 
Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + group, err := p.groupService.GetGroup(project.GroupId) + if err != nil { + log.Error("Failed to add project: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add project", + Debug: err.Error()}) + } + return + } + validationErr := utils.ValidateObject(project) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + if !IsAcceptableAlias(project.Alias) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasContainsForbiddenChars, + Message: exception.AliasContainsForbiddenCharsMsg, + }) + return + } + + resultProject, err := p.projectService.AddProject(context.Create(r), &project, group.Id) + if err != nil { + log.Error("Failed to add project: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add project", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusCreated, resultProject) +} + +func (p projectControllerImpl) GetProject(w http.ResponseWriter, r *http.Request) { + id := getStringParam(r, "projectId") + + var project interface{} //todo remove this + project, err := p.projectService.GetProject(context.Create(r), id) + //todo remove this + if err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Status == 404 { + project, err = p.searchService.GetPackage(context.Create(r), id) + } + } + } + + if err != nil { + log.Error("Failed to get project: ", err.Error()) + if customError, ok := 
err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get project", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, project) +} + +func (p projectControllerImpl) GetFilteredProjects(w http.ResponseWriter, r *http.Request) { + var err error + filter := r.URL.Query().Get("textFilter") + groupId := r.URL.Query().Get("groupId") + onlyFavoriteStr := r.URL.Query().Get("onlyFavorite") + onlyFavorite := false + if onlyFavoriteStr != "" { + onlyFavorite, err = strconv.ParseBool(onlyFavoriteStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyFavorite", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + onlyPublishedStr := r.URL.Query().Get("onlyPublished") + onlyPublished := false + if onlyPublishedStr != "" { + onlyPublished, err = strconv.ParseBool(onlyPublishedStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "onlyPublished", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error()}) + return + } + } + var filteredObjects 
interface{} + if onlyPublished { + filteredObjects, err = p.searchService.GetFilteredPackages(context.Create(r), filter, groupId, onlyFavorite, onlyPublished) + } else { + filteredObjects, err = p.searchService.GetFilteredProjects(context.Create(r), filter, groupId, onlyFavorite, onlyPublished, limit, page) + } + if err != nil { + log.Error("Failed to get projects: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get projects", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, filteredObjects) +} + +func (p projectControllerImpl) UpdateProject(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var newProject *view.Project + + err = json.Unmarshal(body, &newProject) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + newProject.Id = projectId + + savedProject, err := p.projectService.UpdateProject(context.Create(r), newProject) + if err != nil { + log.Errorf("Failed to update Project info: %s", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to update Project info", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, savedProject) +} + +func (p 
projectControllerImpl) DeleteProject(w http.ResponseWriter, r *http.Request) { + id := getStringParam(r, "projectId") + err := p.projectService.DeleteProject(context.Create(r), id) + if err != nil { + log.Error("Failed to delete project: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to delete project", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} + +func (p projectControllerImpl) FavorProject(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + + err := p.projectService.FavorProject(context.Create(r), projectId) + if err != nil { + log.Error("Failed to add project to favorites: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to add project to favorites", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} + +func (p projectControllerImpl) DisfavorProject(w http.ResponseWriter, r *http.Request) { + projectId := getStringParam(r, "projectId") + + err := p.projectService.DisfavorProject(context.Create(r), projectId) + if err != nil { + log.Error("Failed to remove project from favorites: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to remove project from favorites", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} diff --git a/qubership-apihub-service/controller/PublishController.go b/qubership-apihub-service/controller/PublishController.go new file mode 100644 index 0000000..de75ae0 --- 
/dev/null +++ b/qubership-apihub-service/controller/PublishController.go @@ -0,0 +1,763 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "strconv" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type PublishV2Controller interface { + Publish(w http.ResponseWriter, r *http.Request) + GetPublishStatus(w http.ResponseWriter, r *http.Request) + GetPublishStatuses(w http.ResponseWriter, r *http.Request) + GetFreeBuild(w http.ResponseWriter, r *http.Request) + SetPublishStatus_deprecated(w http.ResponseWriter, r *http.Request) + SetPublishStatus(w http.ResponseWriter, r *http.Request) +} + +func NewPublishV2Controller(buildService service.BuildService, + publishedService service.PublishedService, + buildResultService service.BuildResultService, + roleService service.RoleService, + systemInfoService service.SystemInfoService) PublishV2Controller { + + 
publishArchiveSizeLimit := systemInfoService.GetPublishArchiveSizeLimitMB() + publishFileSizeLimit := systemInfoService.GetPublishFileSizeLimitMB() + + return &publishV2ControllerImpl{ + buildService: buildService, + publishedService: publishedService, + buildResultService: buildResultService, + roleService: roleService, + publishArchiveSizeLimit: publishArchiveSizeLimit, + publishFileSizeLimit: publishFileSizeLimit, + systemInfoService: systemInfoService, + } +} + +type publishV2ControllerImpl struct { + buildService service.BuildService + publishedService service.PublishedService + buildResultService service.BuildResultService + roleService service.RoleService + systemInfoService service.SystemInfoService + + publishArchiveSizeLimit int64 + publishFileSizeLimit int64 +} + +func (p publishV2ControllerImpl) Publish(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + r.Body = http.MaxBytesReader(w, r.Body, p.publishArchiveSizeLimit) + + if r.ContentLength > p.publishArchiveSizeLimit { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ArchiveSizeExceeded, + Message: exception.ArchiveSizeExceededMsg, + Params: map[string]interface{}{"size": p.publishArchiveSizeLimit}, + }) + return + } + + err := r.ParseMultipartForm(0) + if err != nil { + if strings.Contains(err.Error(), "http: request body too large") { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ArchiveSizeExceeded, + Message: exception.ArchiveSizeExceededMsg, + Params: map[string]interface{}{"size": p.publishArchiveSizeLimit}, + }) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + } + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to 
remove temporal data: %+v", err) + } + }() + + clientBuild := false + clientBuildStr := r.FormValue("clientBuild") + if clientBuildStr != "" { + clientBuild, err = strconv.ParseBool(clientBuildStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "clientBuild"}, + Debug: err.Error(), + }) + return + } + } + + resolveRefs := true + resolveRefsStr := r.FormValue("resolveRefs") + if resolveRefsStr != "" { + resolveRefs, err = strconv.ParseBool(resolveRefsStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "resolveRefs"}, + Debug: err.Error(), + }) + return + } + } + + resolveConflicts := true + resolveConflictsStr := r.FormValue("resolveConflicts") + if resolveConflictsStr != "" { + resolveConflicts, err = strconv.ParseBool(resolveConflictsStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "resolveConflicts"}, + Debug: err.Error(), + }) + return + } + } + + var sourcesData []byte + _, srcExists := r.MultipartForm.File["sources"] + if srcExists { + sourcesFile, archiveFileHeader, err := r.FormFile("sources") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + + sourcesData, err = ioutil.ReadAll(sourcesFile) + closeErr := sourcesFile.Close() + if closeErr != nil { + log.Debugf("failed to close temporal file: %+v", err) + } + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + + if !strings.HasSuffix(archiveFileHeader.Filename, ".zip") { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "sources file name, expecting .zip archive"}, + }) + return + } + encoding := r.Header.Get("Content-Transfer-Encoding") + if strings.EqualFold(encoding, "base64") { + _, err := base64.StdEncoding.Decode(sourcesData, sourcesData) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + } + } + + configStr := r.FormValue("config") + if configStr == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "config"}, + }) + return + } + + var config view.BuildConfig + err = json.Unmarshal([]byte(configStr), &config) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "config"}, + Debug: err.Error(), + }) + return + } + if config.PackageId == "" { + config.PackageId = packageId + } else { + if packageId != config.PackageId { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageIdMismatch, + Message: exception.PackageIdMismatchMsg, + Params: map[string]interface{}{"configPackageId": config.PackageId, "packageId": packageId}, + }) + } + } + + config.CreatedBy = ctx.GetUserId() + 
config.BuildType = view.BuildType + + for i, file := range config.Files { + if file.Publish == nil { + deflt := true + config.Files[i].Publish = &deflt + } + } + + _, err = view.ParseVersionStatus(config.Status) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: err.Error(), + }) + return + } + + sufficientPrivileges, err := p.roleService.HasManageVersionPermission(ctx, packageId, config.Status) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + var dependencies []string + dependenciesStr := r.FormValue("dependencies") + if dependenciesStr != "" { + err = json.Unmarshal([]byte(dependenciesStr), &dependencies) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "dependencies"}, + Debug: err.Error(), + }) + return + } + } + builderId := r.FormValue("builderId") + if clientBuild && builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + result, err := p.buildService.PublishVersion(ctx, config, sourcesData, clientBuild, builderId, dependencies, resolveRefs, resolveConflicts) + if err != nil { + RespondWithError(w, "Failed to publish package", err) + return + } + if result.PublishId == "" { + w.WriteHeader(http.StatusNoContent) + } else { + RespondWithJson(w, http.StatusAccepted, result) + } +} + +func (p publishV2ControllerImpl) 
GetPublishStatus(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + publishId := getStringParam(r, "publishId") + + status, details, err := p.buildService.GetStatus(publishId) + if err != nil { + RespondWithError(w, "Failed to get publish status", err) + return + } + + if status == "" && details == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Message: "build not found", + }) + return + } + + RespondWithJson(w, http.StatusOK, view.PublishStatusResponse{ + PublishId: publishId, + Status: status, + Message: details, + }) +} + +func (p publishV2ControllerImpl) GetPublishStatuses(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.BuildsStatusRequest + err = json.Unmarshal(body, &req) + 
if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + result, err := p.buildService.GetStatuses(req.PublishIds) + if err != nil { + RespondWithError(w, "Failed to get publish statuses", err) + return + } + + if result == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Message: "builds not found", + }) + return + } + + RespondWithJson(w, http.StatusOK, result) +} + +func (p publishV2ControllerImpl) SetPublishStatus_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + publishId := getStringParam(r, "publishId") //buildId + + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(1024 * 1024) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + + var status view.BuildStatusEnum + statusStr := r.FormValue("status") + status, err = view.BuildStatusFromString(statusStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "status"}, + Debug: 
err.Error(), + }) + return + } + + builderId := r.FormValue("builderId") + if builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + err = p.buildService.ValidateBuildOwnership(publishId, builderId) + if err != nil { + RespondWithError(w, "Failed to validate build ownership", err) + return + } + + details := "" + switch status { + case view.StatusError: + details = r.FormValue("errors") + err = p.buildService.UpdateBuildStatus(publishId, status, details) + if err != nil { + RespondWithError(w, "Failed to update build status", err) + return + } + case view.StatusComplete: + var packageData []byte + sourcesFile, archiveFileHeader, err := r.FormFile("data") + if err != nil { + if err == http.ErrMissingFile { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "data"}, + }) + return + } + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + packageData, err = ioutil.ReadAll(sourcesFile) + closeErr := sourcesFile.Close() + if closeErr != nil { + log.Debugf("failed to close temporal file: %+v", err) + } + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + if !strings.HasSuffix(archiveFileHeader.Filename, ".zip") { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: 
exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "data file name, expecting .zip archive"}, + }) + return + } + encoding := r.Header.Get("Content-Transfer-Encoding") + if strings.EqualFold(encoding, "base64") { + _, err := base64.StdEncoding.Decode(packageData, packageData) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + } + availableVersionStatuses, err := p.roleService.GetAvailableVersionPublishStatuses(ctx, packageId) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + // TODO: enable for debug only? + utils.SafeAsync(func() { + err = p.buildResultService.StoreBuildResult(publishId, packageData) + if err != nil { + log.Errorf("Failed to save build result for %s: %s", publishId, err.Error()) + return + } + }) + err = p.publishedService.SaveBuildResult_deprecated(packageId, packageData, publishId, availableVersionStatuses) + if err != nil { + RespondWithError(w, "Failed to publish build package", err) + return + } + case view.StatusNotStarted: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Message: fmt.Sprintf("Value '%v' is not acceptable for status", status), + }) + return + case view.StatusRunning: + err = p.buildService.UpdateBuildStatus(publishId, status, details) + if err != nil { + RespondWithError(w, "Failed to update build status", err) + return + } + } + + w.WriteHeader(http.StatusNoContent) +} + +func (p publishV2ControllerImpl) SetPublishStatus(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + publishId := getStringParam(r, "publishId") //buildId + + ctx := context.Create(r) + sufficientPrivileges, err := p.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to 
check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(1024 * 1024) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporal data: %+v", err) + } + }() + + var status view.BuildStatusEnum + statusStr := r.FormValue("status") + status, err = view.BuildStatusFromString(statusStr) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "status"}, + Debug: err.Error(), + }) + return + } + + builderId := r.FormValue("builderId") + if builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + err = p.buildService.ValidateBuildOwnership(publishId, builderId) + if err != nil { + RespondWithError(w, "Failed to validate build ownership", err) + return + } + + details := "" + switch status { + case view.StatusError: + details = r.FormValue("errors") + err = p.buildService.UpdateBuildStatus(publishId, status, details) + if err != nil { + RespondWithError(w, "Failed to update build status", err) + return + } + case view.StatusComplete: + var packageData []byte + sourcesFile, archiveFileHeader, err := r.FormFile("data") + if err != nil { + if err == http.ErrMissingFile { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "data"}, + }) + return + } + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + packageData, err = ioutil.ReadAll(sourcesFile) + closeErr := sourcesFile.Close() + if closeErr != nil { + log.Debugf("failed to close temporal file: %+v", err) + } + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + if !strings.HasSuffix(archiveFileHeader.Filename, ".zip") { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "data file name, expecting .zip archive"}, + }) + return + } + encoding := r.Header.Get("Content-Transfer-Encoding") + if strings.EqualFold(encoding, "base64") { + _, err := base64.StdEncoding.Decode(packageData, packageData) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + } + availableVersionStatuses, err := p.roleService.GetAvailableVersionPublishStatuses(ctx, packageId) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + // TODO: enable for debug only? 
+ utils.SafeAsync(func() { + err = p.buildResultService.StoreBuildResult(publishId, packageData) + if err != nil { + log.Errorf("Failed to save build result for %s: %s", publishId, err.Error()) + return + } + }) + err = p.publishedService.SaveBuildResult(packageId, packageData, publishId, availableVersionStatuses) + if err != nil { + RespondWithError(w, "Failed to publish build package", err) + return + } + case view.StatusNotStarted: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Message: fmt.Sprintf("Value '%v' is not acceptable for status", status), + }) + return + case view.StatusRunning: + err = p.buildService.UpdateBuildStatus(publishId, status, details) + if err != nil { + RespondWithError(w, "Failed to update build status", err) + return + } + } + + w.WriteHeader(http.StatusNoContent) +} + +func (p publishV2ControllerImpl) GetFreeBuild(w http.ResponseWriter, r *http.Request) { + builderId := getStringParam(r, "builderId") + start := time.Now() + + src, err := p.buildService.GetFreeBuild(builderId) + + if err != nil { + RespondWithError(w, "Failed to get free build", err) + return + } + + if src != nil { + w.Header().Set("Content-Type", "application/zip") + w.Write(src) + } else { + w.WriteHeader(http.StatusNoContent) + } + log.Debugf("GetFreeBuild took %dms", time.Since(start).Milliseconds()) +} diff --git a/qubership-apihub-service/controller/PublishedController.go b/qubership-apihub-service/controller/PublishedController.go new file mode 100644 index 0000000..860502a --- /dev/null +++ b/qubership-apihub-service/controller/PublishedController.go @@ -0,0 +1,360 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "fmt" + "net/http" + "net/url" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + log "github.com/sirupsen/logrus" +) + +type PublishedController interface { + GetVersion(w http.ResponseWriter, r *http.Request) + GetVersionSources(w http.ResponseWriter, r *http.Request) + GetPublishedVersionSourceDataConfig(w http.ResponseWriter, r *http.Request) + GetPublishedVersionBuildConfig(w http.ResponseWriter, r *http.Request) + GetSharedContentFile(w http.ResponseWriter, r *http.Request) + SharePublishedFile(w http.ResponseWriter, r *http.Request) + GenerateFileDocumentation(w http.ResponseWriter, r *http.Request) + GenerateVersionDocumentation(w http.ResponseWriter, r *http.Request) +} + +func NewPublishedController(versionService service.PublishedService, portalService service.PortalService, searchService service.SearchService) PublishedController { + return &publishControllerImpl{ + publishedService: versionService, + portalService: portalService, + searchService: searchService, + } +} + +type publishControllerImpl struct { + publishedService service.PublishedService + portalService service.PortalService + searchService service.SearchService +} + +func (v publishControllerImpl) GetVersion(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := 
getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + importFiles := r.URL.Query().Get("importFiles") == "true" + //dependFiles := r.URL.Query().Get("dependFiles") == "true" + dependFiles := false //TODO for now this option is disabled + version, err := v.publishedService.GetVersion(packageId, versionName, importFiles, dependFiles) + if err != nil { + log.Error("Failed to get package version: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get package version", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, version) +} + +func (v publishControllerImpl) GetVersionSources(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + srcArchive, err := v.publishedService.GetVersionSources(packageId, versionName) + if err != nil { + log.Error("Failed to get package version sources: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get package version sources", + Debug: err.Error()}) + } + return + } + + w.Header().Set("Content-Type", 
"application/octet-stream") + w.WriteHeader(http.StatusOK) + w.Write(srcArchive) +} + +func (v publishControllerImpl) GetPublishedVersionSourceDataConfig(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + publishedVersionSourceDataConfig, err := v.publishedService.GetPublishedVersionSourceDataConfig(packageId, versionName) + if err != nil { + log.Error("Failed to get package version sources: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get package version sources", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, publishedVersionSourceDataConfig) +} + +func (v publishControllerImpl) GetPublishedVersionBuildConfig(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + publishedVersionBuildConfig, err := v.publishedService.GetPublishedVersionBuildConfig(packageId, versionName) + if err != nil { + log.Error("Failed to get package version build config: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get package version build config", + Debug: err.Error()}) + } + return + } + + RespondWithJson(w, http.StatusOK, publishedVersionBuildConfig) +} + +func (v publishControllerImpl) GetSharedContentFile(w http.ResponseWriter, r *http.Request) { + sharedUrl := getStringParam(r, "shared_id") + + contentData, err := v.publishedService.GetSharedFile(sharedUrl) + if err != nil { + log.Error("Failed to get published content by shared ID: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get published content by shared ID", + Debug: err.Error()}) + } + return + } + w.Header().Set("Content-Type", "text/plain") // For frontend it's convenient to get all types as plain text + w.WriteHeader(http.StatusOK) + w.Write(contentData) +} + +func (v publishControllerImpl) SharePublishedFile(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + fileSlug, err := url.QueryUnescape(getStringParam(r, "fileSlug")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "fileSlug"}, + Debug: err.Error(), + }) + return + } + + sharedUrlInfo, err := v.publishedService.SharePublishedFile(packageId, versionName, fileSlug) + if err != nil { + log.Error("Failed to create shared URL for content: 
", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to create shared URL for content", + Debug: err.Error()}) + } + return + } + RespondWithJson(w, http.StatusOK, sharedUrlInfo) +} + +func (v publishControllerImpl) GenerateFileDocumentation(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + slug := getStringParam(r, "slug") + + docType := view.GetDtFromStr(r.URL.Query().Get("docType")) + + var data []byte + switch docType { + case view.DTInteractive: + var filename string + data, filename, err = v.portalService.GenerateInteractivePageForPublishedFile(packageId, versionName, slug) + if err != nil { + log.Error("Failed to generate interactive HTML page for file ", packageId+":"+versionName+":"+slug, " ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to generate interactive HTML page for file", + Debug: err.Error()}) + } + return + } + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", filename)) + + case view.DTRaw: + content, cd, err := v.publishedService.GetLatestContentDataBySlug(packageId, versionName, slug) + if err != nil { + log.Error("Failed to get published content as file: ", err.Error()) + if customError, ok := 
err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get published content as file", + Debug: err.Error()}) + } + return + } + data = cd.Data + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", content.Name)) + + case view.DTPdf, view.DTStatic: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotImplemented, + Message: "Document type " + string(docType) + " is not supported yet"}) + return + } + + w.WriteHeader(http.StatusOK) + w.Write(data) +} + +func (v publishControllerImpl) GenerateVersionDocumentation(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + docType := view.GetDtFromStr(r.URL.Query().Get("docType")) + + var data []byte + var filename string + switch docType { + case view.DTInteractive: + data, filename, err = v.portalService.GenerateInteractivePageForPublishedVersion(packageId, versionName) + + if err != nil { + log.Error("Failed to generate interactive HTML page for version ", packageId+":"+versionName, " ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to generate interactive HTML page for version", + Debug: err.Error()}) + } + return + } + + w.Header().Set("Content-Type", "application/octet-stream") + 
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%v", filename)) + + case view.DTRaw: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Message: "Document type " + string(docType) + " is not applicable for version"}) + return + + case view.DTPdf, view.DTStatic: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotImplemented, + Message: "Document type " + string(docType) + " is not supported yet"}) + return + } + + w.WriteHeader(http.StatusOK) + w.Write(data) +} diff --git a/qubership-apihub-service/controller/RefsController.go b/qubership-apihub-service/controller/RefsController.go new file mode 100644 index 0000000..252b46c --- /dev/null +++ b/qubership-apihub-service/controller/RefsController.go @@ -0,0 +1,106 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type RefController interface { + UpdateRefs(w http.ResponseWriter, r *http.Request) +} + +func NewRefController(draftRefService service.DraftRefService, + wsBranchService service.WsBranchService) RefController { + return &refControllerImpl{ + draftRefService: draftRefService, + wsBranchService: wsBranchService, + } +} + +type refControllerImpl struct { + draftRefService service.DraftRefService + wsBranchService service.WsBranchService +} + +func (c refControllerImpl) UpdateRefs(w http.ResponseWriter, r *http.Request) { + var err error + projectId := getStringParam(r, "projectId") + branchName, err := getUnescapedStringParam(r, "branchName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "branchName"}, + Debug: err.Error(), + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var refPatch view.RefPatch + err = json.Unmarshal(body, &refPatch) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: 
err.Error(), + }) + return + } + validationErr := utils.ValidateObject(refPatch) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = c.draftRefService.UpdateRefs(context.Create(r), projectId, branchName, refPatch) + if err != nil { + log.Error("Failed to update refs: ", err.Error()) + if customError, ok := err.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + } else { + c.wsBranchService.DisconnectClients(projectId, branchName) + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to update refs", + Debug: err.Error()}) + } + return + } + w.WriteHeader(http.StatusOK) +} diff --git a/qubership-apihub-service/controller/RoleController.go b/qubership-apihub-service/controller/RoleController.go new file mode 100644 index 0000000..2898fda --- /dev/null +++ b/qubership-apihub-service/controller/RoleController.go @@ -0,0 +1,527 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type RoleController interface { + GetPackageMembers(w http.ResponseWriter, r *http.Request) + DeletePackageMember(w http.ResponseWriter, r *http.Request) + AddPackageMembers(w http.ResponseWriter, r *http.Request) + UpdatePackageMembers(w http.ResponseWriter, r *http.Request) + CreateRole(w http.ResponseWriter, r *http.Request) + DeleteRole(w http.ResponseWriter, r *http.Request) + UpdateRole(w http.ResponseWriter, r *http.Request) + GetExistingRoles(w http.ResponseWriter, r *http.Request) + GetAvailablePackageRoles(w http.ResponseWriter, r *http.Request) + SetRoleOrder(w http.ResponseWriter, r *http.Request) + GetExistingPermissions(w http.ResponseWriter, r *http.Request) + + GetAvailableUserPackagePromoteStatuses(w http.ResponseWriter, r *http.Request) + TestSetUserSystemRole(w http.ResponseWriter, r *http.Request) +} + +func NewRoleController(roleService service.RoleService) RoleController { + return &roleControllerImpl{ + roleService: roleService, + } +} + +type roleControllerImpl struct { + roleService service.RoleService +} + +func (c roleControllerImpl) GetPackageMembers(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + 
Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + members, err := c.roleService.GetPackageMembers(packageId) + if err != nil { + RespondWithError(w, "Failed to get package members", err) + return + } + RespondWithJson(w, http.StatusOK, members) +} + +func (c roleControllerImpl) DeletePackageMember(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.UserAccessManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + userId, err := getUnescapedStringParam(r, "userId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "userId"}, + Debug: err.Error(), + }) + return + } + + indirectMemberRole, err := c.roleService.DeletePackageMember(ctx, packageId, userId) + if err != nil { + RespondWithError(w, "Failed to delete package member", err) + return + } + if indirectMemberRole == nil { + w.WriteHeader(http.StatusNoContent) + } else { + RespondWithJson(w, http.StatusOK, indirectMemberRole) + } +} + +func (c roleControllerImpl) AddPackageMembers(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.UserAccessManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges 
{ + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var packageMembersReq view.PackageMembersAddReq + err = json.Unmarshal(body, &packageMembersReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(packageMembersReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + members, err := c.roleService.AddPackageMembers(ctx, packageId, packageMembersReq.Emails, packageMembersReq.RoleIds) + if err != nil { + RespondWithError(w, "Failed to add package members", err) + return + } + RespondWithJson(w, http.StatusCreated, members) +} + +func (c roleControllerImpl) UpdatePackageMembers(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.UserAccessManagementPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + userId, err := getUnescapedStringParam(r, "userId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + 
Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "userId"}, + Debug: err.Error(), + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var packageMemberUpdatePatch view.PackageMemberUpdatePatch + err = json.Unmarshal(body, &packageMemberUpdatePatch) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(packageMemberUpdatePatch) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = c.roleService.UpdatePackageMember(ctx, packageId, userId, packageMemberUpdatePatch.RoleId, packageMemberUpdatePatch.Action) + if err != nil { + RespondWithError(w, "Failed to update package member", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (c roleControllerImpl) CreateRole(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !c.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var createRoleReq view.PackageRoleCreateReq + err = json.Unmarshal(body, 
&createRoleReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(createRoleReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + createdRole, err := c.roleService.CreateRole(createRoleReq.Role, createRoleReq.Permissions) + if err != nil { + RespondWithError(w, "Failed to create new role", err) + return + } + RespondWithJson(w, http.StatusCreated, createdRole) +} + +func (c roleControllerImpl) DeleteRole(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !c.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + roleId := getStringParam(r, "roleId") + err := c.roleService.DeleteRole(roleId) + if err != nil { + RespondWithError(w, "Failed to delete role", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (c roleControllerImpl) UpdateRole(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !c.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + roleId := getStringParam(r, "roleId") + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var updateRoleReq view.PackageRoleUpdateReq + err = json.Unmarshal(body, &updateRoleReq) + if err != nil { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + if updateRoleReq.Permissions != nil { + err = c.roleService.SetRolePermissions(roleId, *updateRoleReq.Permissions) + if err != nil { + RespondWithError(w, "Failed to update role permissions", err) + return + } + } + w.WriteHeader(http.StatusNoContent) +} + +func (c roleControllerImpl) GetExistingRoles(w http.ResponseWriter, r *http.Request) { + roles, err := c.roleService.GetExistingRolesExcludingNone() + if err != nil { + RespondWithError(w, "Failed to get existing roles", err) + return + } + RespondWithJson(w, http.StatusOK, roles) +} + +func (c roleControllerImpl) GetExistingPermissions(w http.ResponseWriter, r *http.Request) { + permissions, err := c.roleService.GetExistingPermissions() + if err != nil { + RespondWithError(w, "Failed to get permissions list", err) + return + } + RespondWithJson(w, http.StatusOK, permissions) +} + +func (c roleControllerImpl) GetAvailablePackageRoles(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + packageId := getStringParam(r, "packageId") + sufficientPrivileges, err := c.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + availableRoles, err := c.roleService.GetAvailablePackageRoles(ctx, packageId, true) + if err != nil { + RespondWithError(w, "Failed to get available package roles", err) + return + } + RespondWithJson(w, http.StatusOK, availableRoles) +} + +func (c roleControllerImpl) SetRoleOrder(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if 
!c.roleService.IsSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var setRoleOrderReq view.PackageRoleOrderReq + err = json.Unmarshal(body, &setRoleOrderReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(setRoleOrderReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + err = c.roleService.SetRoleOrder(setRoleOrderReq.Roles) + if err != nil { + RespondWithError(w, "Failed to update role permissions", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (c roleControllerImpl) GetAvailableUserPackagePromoteStatuses(w http.ResponseWriter, r *http.Request) { + userId, err := getUnescapedStringParam(r, "userId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "userId"}, + Debug: err.Error(), + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var packages view.PackagesReq + err = 
json.Unmarshal(body, &packages) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + availablePackagePromoteStatuses, err := c.roleService.GetUserPackagePromoteStatuses(packages.Packages, userId) + if err != nil { + RespondWithError(w, "Failed to get package promote statuses available for user", err) + return + } + RespondWithJson(w, http.StatusOK, availablePackagePromoteStatuses) +} + +func (c roleControllerImpl) TestSetUserSystemRole(w http.ResponseWriter, r *http.Request) { + userId, err := getUnescapedStringParam(r, "userId") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "userId"}, + Debug: err.Error(), + }) + return + } + + defer r.Body.Close() + params, err := getParamsFromBody(r) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + role, err := getBodyStringParam(params, "role") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: exception.InvalidParameterMsg, + Params: map[string]interface{}{"param": "role"}, + Debug: err.Error(), + }) + return + } + + err = c.roleService.SetUserSystemRole(userId, role) + if err != nil { + RespondWithError(w, "Failed to set user system role", err) + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/qubership-apihub-service/controller/SearchController.go b/qubership-apihub-service/controller/SearchController.go new file mode 100644 index 0000000..b16f997 --- /dev/null +++ 
b/qubership-apihub-service/controller/SearchController.go @@ -0,0 +1,232 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type SearchController interface { + Search_deprecated(w http.ResponseWriter, r *http.Request) + Search(w http.ResponseWriter, r *http.Request) +} + +func NewSearchController(operationService service.OperationService, versionService service.VersionService, monitoringService service.MonitoringService) SearchController { + return &searchControllerImpl{ + operationService: operationService, + versionService: versionService, + monitoringService: monitoringService, + } +} + +type searchControllerImpl struct { + operationService service.OperationService + versionService service.VersionService + monitoringService service.MonitoringService +} + +// deprecated +func (s searchControllerImpl) Search_deprecated(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var searchQuery view.SearchQueryReq + + err = json.Unmarshal(body, &searchQuery) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(searchQuery) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error()}) + return + } + } + searchLevel := getStringParam(r, "searchLevel") + searchQuery.Limit = limit + searchQuery.Page = page + + switch searchLevel { + case view.SearchLevelOperations: + { + result, err := s.operationService.SearchForOperations_deprecated(searchQuery) + if err != nil { + RespondWithError(w, "Failed to perform search for operations", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + case view.SearchLevelPackages: + { + result, err := s.versionService.SearchForPackages(searchQuery) + if err != nil { + RespondWithError(w, "Failed to perform search for packages", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + case view.SearchLevelDocuments: + { + result, err := s.versionService.SearchForDocuments(searchQuery) + if err != nil { + RespondWithError(w, "Failed 
to perform search for documents", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + default: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "searchLevel", "value": searchLevel}, + }) + return + } +} + +func (s searchControllerImpl) Search(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var searchQuery view.SearchQueryReq + + err = json.Unmarshal(body, &searchQuery) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(searchQuery) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error()}) + return + } + } + searchLevel := getStringParam(r, "searchLevel") + searchQuery.Limit = limit + searchQuery.Page = page + + s.monitoringService.AddEndpointCall(getTemplatePath(r), view.MakeSearchEndpointOptions(searchLevel, 
searchQuery.OperationSearchParams)) + switch searchLevel { + case view.SearchLevelOperations: + { + result, err := s.operationService.SearchForOperations(searchQuery) + if err != nil { + RespondWithError(w, "Failed to perform search for operations", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + case view.SearchLevelPackages: + { + result, err := s.versionService.SearchForPackages(searchQuery) + if err != nil { + RespondWithError(w, "Failed to perform search for packages", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + case view.SearchLevelDocuments: + { + result, err := s.versionService.SearchForDocuments(searchQuery) + if err != nil { + RespondWithError(w, "Failed to perform search for documents", err) + return + } + RespondWithJson(w, http.StatusOK, result) + } + default: + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "searchLevel", "value": searchLevel}, + }) + return + } +} diff --git a/qubership-apihub-service/controller/SysAdminController.go b/qubership-apihub-service/controller/SysAdminController.go new file mode 100644 index 0000000..a96fe67 --- /dev/null +++ b/qubership-apihub-service/controller/SysAdminController.go @@ -0,0 +1,131 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type SysAdminController interface { + GetSystemAdministrators(w http.ResponseWriter, r *http.Request) + AddSystemAdministrator(w http.ResponseWriter, r *http.Request) + DeleteSystemAdministrator(w http.ResponseWriter, r *http.Request) +} + +func NewSysAdminController(roleService service.RoleService) SysAdminController { + return &sysAdminControllerImpl{ + roleService: roleService, + } +} + +type sysAdminControllerImpl struct { + roleService service.RoleService +} + +func (a sysAdminControllerImpl) GetSystemAdministrators(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := a.roleService.IsSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + admins, err := a.roleService.GetSystemAdministrators() + if err != nil { + RespondWithError(w, "Failed to get system administrators", err) + return + } + RespondWithJson(w, http.StatusOK, admins) +} + +func (a sysAdminControllerImpl) AddSystemAdministrator(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := a.roleService.IsSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer 
r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var addSysadmReq view.AddSysadmReq + err = json.Unmarshal(body, &addSysadmReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(addSysadmReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + admins, err := a.roleService.AddSystemAdministrator(addSysadmReq.UserId) + if err != nil { + RespondWithError(w, "Failed to add system administrator", err) + return + } + RespondWithJson(w, http.StatusOK, admins) +} + +func (a sysAdminControllerImpl) DeleteSystemAdministrator(w http.ResponseWriter, r *http.Request) { + userId := getStringParam(r, "userId") + ctx := context.Create(r) + sufficientPrivileges := a.roleService.IsSysadm(ctx) + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err := a.roleService.DeleteSystemAdministrator(userId) + if err != nil { + RespondWithError(w, "Failed to delete system administrator", err) + return + } + w.WriteHeader(http.StatusNoContent) +} diff --git a/qubership-apihub-service/controller/SystemInfoController.go b/qubership-apihub-service/controller/SystemInfoController.go new file mode 100644 index 0000000..a177bd3 --- /dev/null +++ b/qubership-apihub-service/controller/SystemInfoController.go @@ -0,0 +1,37 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the 
Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" +) + +type SystemInfoController interface { + GetSystemInfo(w http.ResponseWriter, r *http.Request) +} + +func NewSystemInfoController(service service.SystemInfoService) SystemInfoController { + return &systemInfoControllerImpl{service: service} +} + +type systemInfoControllerImpl struct { + service service.SystemInfoService +} + +func (g systemInfoControllerImpl) GetSystemInfo(w http.ResponseWriter, r *http.Request) { + RespondWithJson(w, http.StatusOK, g.service.GetSystemInfo()) +} diff --git a/qubership-apihub-service/controller/TransformationController.go b/qubership-apihub-service/controller/TransformationController.go new file mode 100644 index 0000000..1d753f1 --- /dev/null +++ b/qubership-apihub-service/controller/TransformationController.go @@ -0,0 +1,673 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "fmt" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type TransformationController interface { + TransformDocuments_deprecated(w http.ResponseWriter, r *http.Request) + TransformDocuments(w http.ResponseWriter, r *http.Request) + GetDataForDocumentsTransformation(w http.ResponseWriter, r *http.Request) +} + +func NewTransformationController(roleService service.RoleService, buildService service.BuildService, versionService service.VersionService, transformationService service.TransformationService, operationGroupService service.OperationGroupService) TransformationController { + return transformationControllerImpl{roleService: roleService, buildService: buildService, versionService: versionService, transformationService: transformationService, operationGroupService: operationGroupService} +} + +type transformationControllerImpl struct { + roleService service.RoleService + buildService service.BuildService + versionService service.VersionService + transformationService service.TransformationService + operationGroupService service.OperationGroupService +} + +func (t transformationControllerImpl) TransformDocuments_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := t.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + exists, err := t.operationGroupService.CheckOperationGroupExists(packageId, versionName, apiType, groupName) + if err != nil { + RespondWithError(w, "Failed to check if operation group exists", err) + return + } + if !exists { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationGroupNotFound, + Message: exception.OperationGroupNotFoundMsg, + Params: map[string]interface{}{"groupName": groupName}, + }) + return + } + + 
builderId, err := url.QueryUnescape(r.URL.Query().Get("builderId")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "builderId"}, + Debug: err.Error(), + }) + return + } + clientBuild := false + if r.URL.Query().Get("clientBuild") != "" { + clientBuild, err = strconv.ParseBool(r.URL.Query().Get("clientBuild")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "clientBuild", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + if clientBuild && builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + + reCalculate := false + if r.URL.Query().Get("reCalculate") != "" { + reCalculate, err = strconv.ParseBool(r.URL.Query().Get("reCalculate")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "reCalculate", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + _, revision, err := service.SplitVersionRevision(versionName) + if err != nil { + RespondWithError(w, "Failed to split version revision", err) + return + } + if revision == 0 { + latestRevision, err := t.versionService.GetLatestRevision(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to get version", err) + return + } + versionName = fmt.Sprintf("%s@%d", versionName, latestRevision) + } + buildConfig := view.BuildConfig{ + PackageId: 
packageId, + Version: versionName, + BuildType: view.DocumentGroupType_deprecated, + CreatedBy: ctx.GetUserId(), + ApiType: apiType, + GroupName: groupName, + } + + if reCalculate { + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + + content, err := t.versionService.GetTransformedDocuments_deprecated(packageId, versionName, apiType, groupName, string(view.JsonDocumentFormat)) + if err != nil { + RespondWithError(w, "Failed to get transformed documents", err) + return + } + if content != nil { + w.WriteHeader(http.StatusOK) + return + } + + searchRequest := view.DocumentGroupBuildSearchRequest{ + PackageId: packageId, + Version: versionName, + BuildType: view.DocumentGroupType_deprecated, + ApiType: apiType, + GroupName: groupName, + } + var calculationProcessStatus view.CalculationProcessStatus + buildView, err := t.buildService.GetBuildViewByDocumentGroupSearchQuery(searchRequest) + if err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Status == http.StatusNotFound { + + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: 
buildId, + }) + return + } + } + RespondWithError(w, "Failed to get buildStatus", err) + return + } + switch buildView.Status { + case string(view.StatusError): + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusError), + Message: buildView.Details, + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + case string(view.StatusComplete): + //this case is possible only if we have an old finished build for which we don't have a transformed documents (rebuild required) + //or if this build completed during this method execution (rebuild is not requried) + content, err := t.versionService.GetTransformedDocuments_deprecated(packageId, versionName, apiType, groupName, string(view.JsonDocumentFormat)) + if err != nil { + RespondWithError(w, "Failed to get transformed documents", err) + return + } + if content != nil { + w.WriteHeader(http.StatusOK) + return + } + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + default: + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + } +} + +func (t transformationControllerImpl) TransformDocuments(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := t.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user 
privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + buildType, err := getUnescapedStringParam(r, "buildType") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "buildType"}, + Debug: err.Error(), + }) + return + } + + format := r.URL.Query().Get("format") + if format == "" { + 
format = string(view.JsonDocumentFormat) + } + + err = view.ValidateFormatForBuildType(buildType, format) + if err != nil { + RespondWithError(w, "buildType format validation failed", err) + return + } + + exists, err := t.operationGroupService.CheckOperationGroupExists(packageId, versionName, apiType, groupName) + if err != nil { + RespondWithError(w, "Failed to check if operation group exists", err) + return + } + if !exists { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationGroupNotFound, + Message: exception.OperationGroupNotFoundMsg, + Params: map[string]interface{}{"groupName": groupName}, + }) + return + } + + builderId, err := url.QueryUnescape(r.URL.Query().Get("builderId")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "builderId"}, + Debug: err.Error(), + }) + return + } + clientBuild := false + if r.URL.Query().Get("clientBuild") != "" { + clientBuild, err = strconv.ParseBool(r.URL.Query().Get("clientBuild")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "clientBuild", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + if clientBuild && builderId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": "builderId"}, + }) + return + } + + reCalculate := false + if r.URL.Query().Get("reCalculate") != "" { + reCalculate, err = strconv.ParseBool(r.URL.Query().Get("reCalculate")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "reCalculate", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + _, revision, err := service.SplitVersionRevision(versionName) + if err != nil { + RespondWithError(w, "Failed to split version revision", err) + return + } + if revision == 0 { + latestRevision, err := t.versionService.GetLatestRevision(packageId, versionName) + if err != nil { + RespondWithError(w, "Failed to get version", err) + return + } + versionName = view.MakeVersionRefKey(versionName, latestRevision) + } + buildConfig := view.BuildConfig{ + PackageId: packageId, + Version: versionName, + BuildType: buildType, + Format: format, + CreatedBy: ctx.GetUserId(), + ApiType: apiType, + GroupName: groupName, + } + + if reCalculate { + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + Format: buildConfig.Format, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + + content, err := t.versionService.GetTransformedDocuments_deprecated(packageId, versionName, apiType, groupName, string(view.JsonDocumentFormat)) + if err != nil { + RespondWithError(w, "Failed to get transformed documents", err) + return + } + if content != nil { + w.WriteHeader(http.StatusOK) + return + } + + searchRequest := view.DocumentGroupBuildSearchRequest{ + PackageId: packageId, + Version: versionName, + BuildType: buildType, + Format: format, + ApiType: apiType, + GroupName: groupName, + } + var calculationProcessStatus view.CalculationProcessStatus + 
buildView, err := t.buildService.GetBuildViewByDocumentGroupSearchQuery(searchRequest) + if err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Status == http.StatusNotFound { + + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + Format: buildConfig.Format, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + } + } + RespondWithError(w, "Failed to get buildStatus", err) + return + } + switch buildView.Status { + case string(view.StatusError): + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusError), + Message: buildView.Details, + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + case string(view.StatusComplete): + //this case is possible only if we have an old finished build for which we don't have a transformed documents (rebuild required) + //or if this build completed during this method execution (rebuild is not requried) + content, err := t.versionService.GetTransformedDocuments_deprecated(packageId, versionName, apiType, groupName, string(view.JsonDocumentFormat)) + if err != nil { + RespondWithError(w, "Failed to get transformed documents", err) + return + } + if content != nil { + w.WriteHeader(http.StatusOK) + return + } + buildId, err := t.buildService.CreateBuildWithoutDependencies(buildConfig, clientBuild, builderId) + if err != nil { + RespondWithError(w, "Failed to create documentGroup type build", err) + return + } + RespondWithJson(w, http.StatusCreated, view.DocumentTransformConfigView{ + PackageId: buildConfig.PackageId, + Version: 
buildConfig.Version, + ApiType: buildConfig.ApiType, + GroupName: buildConfig.GroupName, + BuildType: buildConfig.BuildType, + Format: buildConfig.Format, + CreatedBy: buildConfig.CreatedBy, + BuildId: buildId, + }) + return + default: + calculationProcessStatus = view.CalculationProcessStatus{ + Status: string(view.StatusRunning), + } + RespondWithJson(w, http.StatusAccepted, calculationProcessStatus) + return + } +} + +func (t transformationControllerImpl) GetDataForDocumentsTransformation(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := t.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + apiType, err := getUnescapedStringParam(r, "apiType") + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "apiType"}, + Debug: err.Error(), + }) + return + } + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + groupName, err := getUnescapedStringParam(r, "groupName") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "groupName"}, + Debug: err.Error(), + }) + return + } + + documentsForTransformationFilterReq := view.DocumentsForTransformationFilterReq{ + Limit: limit, + Offset: limit * page, + FilterByOperationGroup: groupName, + ApiType: apiType, + } + + data, err := t.transformationService.GetDataForDocumentsTransformation(packageId, versionName, documentsForTransformationFilterReq) + if err != nil { + RespondWithError(w, "Failed to get version documents", err) + return + } + RespondWithJson(w, http.StatusOK, data) +} diff --git a/qubership-apihub-service/controller/TransitionController.go b/qubership-apihub-service/controller/TransitionController.go new file mode 100644 index 0000000..49e2dba --- /dev/null +++ b/qubership-apihub-service/controller/TransitionController.go @@ -0,0 +1,184 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type TransitionController interface { + MoveOrRenamePackage(w http.ResponseWriter, r *http.Request) + GetMoveStatus(w http.ResponseWriter, r *http.Request) + ListActivities(w http.ResponseWriter, r *http.Request) + ListPackageTransitions(w http.ResponseWriter, r *http.Request) +} + +func NewTransitionController(tService service.TransitionService, isSysadmFunc func(context.SecurityContext) bool) TransitionController { + return &transitionControllerImpl{ + tService: tService, + isSysadmFunc: isSysadmFunc, + } +} + +type transitionControllerImpl struct { + tService service.TransitionService + isSysadmFunc func(context.SecurityContext) bool +} + +func (t transitionControllerImpl) MoveOrRenamePackage(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !t.isSysadmFunc(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + 
if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + var transitionReq view.TransitionRequest + err = json.Unmarshal(body, &transitionReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(transitionReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + id, err := t.tService.MoveOrRenamePackage(ctx, transitionReq.From, transitionReq.To, transitionReq.OverwriteHistory) + if err != nil { + RespondWithError(w, "Failed to move or rename package", err) + return + } + result := map[string]interface{}{} + result["id"] = id + RespondWithJson(w, http.StatusOK, result) +} + +func (t transitionControllerImpl) GetMoveStatus(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !t.isSysadmFunc(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + id := getStringParam(r, "id") + + status, err := t.tService.GetMoveStatus(id) + if err != nil { + RespondWithError(w, "Failed to get transition status", err) + return + } + RespondWithJson(w, http.StatusOK, status) +} + +func (t transitionControllerImpl) ListActivities(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !t.isSysadmFunc(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + var offset int + if 
r.URL.Query().Get("offset") != "" { + var err error + offset, err = strconv.Atoi(r.URL.Query().Get("offset")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "offset", "type": "int"}, + Debug: err.Error(), + }) + } + if offset < 0 { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"value": offset, "param": "offset"}, + }) + } + } + + limit, customErr := getLimitQueryParam(r) + if customErr != nil { + RespondWithCustomError(w, customErr) + return + } + + list, err := t.tService.ListCompletedActivities(offset, limit) + if err != nil { + RespondWithError(w, "Failed to list transition activities", err) + return + } + RespondWithJson(w, http.StatusOK, list) +} + +func (t transitionControllerImpl) ListPackageTransitions(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !t.isSysadmFunc(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + list, err := t.tService.ListPackageTransitions() + if err != nil { + RespondWithError(w, "Failed to list package transitions", err) + return + } + RespondWithJson(w, http.StatusOK, list) +} diff --git a/qubership-apihub-service/controller/UserController.go b/qubership-apihub-service/controller/UserController.go new file mode 100644 index 0000000..b3055dd --- /dev/null +++ b/qubership-apihub-service/controller/UserController.go @@ -0,0 +1,238 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "net/url" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type UserController interface { + GetUserAvatar(w http.ResponseWriter, r *http.Request) + GetUsers(w http.ResponseWriter, r *http.Request) + GetUserById(w http.ResponseWriter, r *http.Request) + CreateInternalUser(w http.ResponseWriter, r *http.Request) + CreatePrivatePackageForUser(w http.ResponseWriter, r *http.Request) + CreatePrivateUserPackage(w http.ResponseWriter, r *http.Request) + GetPrivateUserPackage(w http.ResponseWriter, r *http.Request) +} + +func NewUserController(service service.UserService, privateUserPackageService service.PrivateUserPackageService, isSysadm func(context.SecurityContext) bool) UserController { + return &userControllerImpl{ + service: service, + privateUserPackageService: privateUserPackageService, + isSysadm: isSysadm, + } +} + +type userControllerImpl struct { + service service.UserService + privateUserPackageService service.PrivateUserPackageService + isSysadm func(context.SecurityContext) bool +} + +func (u userControllerImpl) GetUserAvatar(w http.ResponseWriter, r *http.Request) { + userId := 
getStringParam(r, "userId") + if userId == "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "userId"}, + }) + } + userAvatar, err := u.service.GetUserAvatar(userId) + if err != nil { + RespondWithError(w, "Failed to get user avatar", err) + return + } + if userAvatar == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserAvatarNotFound, + Message: exception.UserAvatarNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + }) + return + } + + w.Header().Set("Content-Disposition", "filename=\""+"image"+"\"") + w.Header().Set("Content-Type", "image/png") // TODO: what if avatar is not png? + w.Header().Set("Content-Length", string(rune(len(userAvatar.Avatar)))) + w.Write(userAvatar.Avatar) +} + +func (u userControllerImpl) GetUsers(w http.ResponseWriter, r *http.Request) { + var err error + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + filter, err := url.QueryUnescape(r.URL.Query().Get("filter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "filter"}, + Debug: err.Error(), + }) + return + } + + usersListReq := view.UsersListReq{ + Filter: filter, + Limit: limit, + Page: page, + } + users, err := 
u.service.GetUsers(usersListReq) + if err != nil { + RespondWithError(w, "Failed to get users", err) + return + } + RespondWithJson(w, http.StatusOK, users) +} + +func (u userControllerImpl) GetUserById(w http.ResponseWriter, r *http.Request) { + userId := getStringParam(r, "userId") + + user, err := u.service.GetUserFromDB(userId) + if err != nil { + RespondWithError(w, "Failed to get user", err) + return + } + if user == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + }) + return + } + RespondWithJson(w, http.StatusOK, user) +} + +func (u userControllerImpl) CreateInternalUser(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var internalUser view.InternalUser + err = json.Unmarshal(body, &internalUser) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(internalUser) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + user, err := u.service.CreateInternalUser(&internalUser) + if err != nil { + RespondWithError(w, "Failed to create internal user", err) + return + } + RespondWithJson(w, http.StatusCreated, user) +} + +func (u userControllerImpl) CreatePrivatePackageForUser(w http.ResponseWriter, r *http.Request) { + userId := getStringParam(r, "userId") + ctx := context.Create(r) + if userId != ctx.GetUserId() { + if 
!u.isSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: "only sysadmin can create private package for another user", + }) + return + } + } + packageView, err := u.privateUserPackageService.CreatePrivateUserPackage(ctx, userId) + if err != nil { + RespondWithError(w, "Failed to create private package for user", err) + return + } + RespondWithJson(w, http.StatusCreated, packageView) +} + +func (u userControllerImpl) CreatePrivateUserPackage(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + packageView, err := u.privateUserPackageService.CreatePrivateUserPackage(ctx, ctx.GetUserId()) + if err != nil { + RespondWithError(w, "Failed to create private user package", err) + return + } + RespondWithJson(w, http.StatusCreated, packageView) +} + +func (u userControllerImpl) GetPrivateUserPackage(w http.ResponseWriter, r *http.Request) { + packageView, err := u.privateUserPackageService.GetPrivateUserPackage(context.Create(r).GetUserId()) + if err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Code == exception.PrivateWorkspaceIdDoesntExist { + // do not use respondWithError because it prints annoying(and useless in this case) logs + RespondWithCustomError(w, customError) + return + } + } + RespondWithError(w, "Failed to get private user package", err) + return + } + RespondWithJson(w, http.StatusOK, packageView) +} diff --git a/qubership-apihub-service/controller/VersionController.go b/qubership-apihub-service/controller/VersionController.go new file mode 100644 index 0000000..3b51404 --- /dev/null +++ b/qubership-apihub-service/controller/VersionController.go @@ -0,0 +1,1619 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strconv" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type VersionController interface { + GetPackageVersionContent_deprecated(w http.ResponseWriter, r *http.Request) + GetPackageVersionContent(w http.ResponseWriter, r *http.Request) + GetPackageVersionsList_deprecated(w http.ResponseWriter, r *http.Request) + GetPackageVersionsList(w http.ResponseWriter, r *http.Request) + DeleteVersion(w http.ResponseWriter, r *http.Request) + PatchVersion(w http.ResponseWriter, r *http.Request) + GetVersionedContentFileRaw(w http.ResponseWriter, r *http.Request) + GetVersionedDocument_deprecated(w http.ResponseWriter, r *http.Request) + GetVersionedDocument(w http.ResponseWriter, r *http.Request) + GetVersionDocuments(w http.ResponseWriter, r *http.Request) + GetSharedContentFile(w http.ResponseWriter, r *http.Request) + SharePublishedFile(w http.ResponseWriter, r *http.Request) + GetVersionChanges(w http.ResponseWriter, r *http.Request) + 
GetVersionProblems(w http.ResponseWriter, r *http.Request) + GetVersionReferences(w http.ResponseWriter, r *http.Request) //deprecated + GetVersionReferencesV3(w http.ResponseWriter, r *http.Request) + GetVersionRevisionsList_deprecated(w http.ResponseWriter, r *http.Request) + GetVersionRevisionsList(w http.ResponseWriter, r *http.Request) + DeleteVersionsRecursively(w http.ResponseWriter, r *http.Request) + CopyVersion(w http.ResponseWriter, r *http.Request) + GetPublishedVersionsHistory(w http.ResponseWriter, r *http.Request) + PublishFromCSV(w http.ResponseWriter, r *http.Request) + GetCSVDashboardPublishStatus(w http.ResponseWriter, r *http.Request) + GetCSVDashboardPublishReport(w http.ResponseWriter, r *http.Request) +} + +func NewVersionController(versionService service.VersionService, roleService service.RoleService, monitoringService service.MonitoringService, + ptHandler service.PackageTransitionHandler, isSysadm func(context.SecurityContext) bool) VersionController { + return &versionControllerImpl{ + versionService: versionService, + roleService: roleService, + monitoringService: monitoringService, + ptHandler: ptHandler, + isSysadm: isSysadm, + } +} + +type versionControllerImpl struct { + versionService service.VersionService + roleService service.RoleService + monitoringService service.MonitoringService + ptHandler service.PackageTransitionHandler + isSysadm func(context.SecurityContext) bool +} + +func (v versionControllerImpl) SharePublishedFile(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var sharedFilesReq view.SharedFilesReq + err = json.Unmarshal(body, &sharedFilesReq) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + 
Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(sharedFilesReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, sharedFilesReq.PackageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + sharedUrlInfo, err := v.versionService.SharePublishedFile(sharedFilesReq.PackageId, sharedFilesReq.Version, sharedFilesReq.Slug) + if err != nil { + RespondWithError(w, "Failed to create shared URL for content", err) + return + } + RespondWithJson(w, http.StatusOK, sharedUrlInfo) +} + +func (v versionControllerImpl) GetSharedContentFile(w http.ResponseWriter, r *http.Request) { + sharedFileId := getStringParam(r, "sharedFileId") + + contentData, attachmentFileName, err := v.versionService.GetSharedFile(sharedFileId) + if err != nil { + RespondWithError(w, "Failed to get published content by shared ID", err) + return + } + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", attachmentFileName)) + w.Header().Set("Content-Type", "text/plain") // For frontend it's convenient to get all types as plain text + w.WriteHeader(http.StatusOK) + w.Write(contentData) +} + +// deprecated +func (v versionControllerImpl) GetVersionedDocument_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != 
nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + slug := getStringParam(r, "slug") + + v.monitoringService.AddDocumentOpenCount(packageId, versionName, slug) + v.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.DocumentsCalled, packageId) + + document, err := v.versionService.GetLatestDocumentBySlug_deprecated(packageId, versionName, slug) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get versioned document", err) + return + } + RespondWithJson(w, http.StatusOK, document) +} + +func (v versionControllerImpl) GetVersionedDocument(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: 
exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + slug := getStringParam(r, "slug") + + v.monitoringService.AddDocumentOpenCount(packageId, versionName, slug) + v.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.DocumentsCalled, packageId) + + document, err := v.versionService.GetLatestDocumentBySlug(packageId, versionName, slug) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get versioned document", err) + return + } + RespondWithJson(w, http.StatusOK, document) +} + +func (v versionControllerImpl) GetVersionDocuments(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: 
exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + apiType := r.URL.Query().Get("apiType") + if apiType != "" { + _, err = view.ParseApiType(apiType) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "apiType", "value": apiType}, + Debug: err.Error(), + }) + return + } + } + + skipRefs := false + if r.URL.Query().Get("skipRefs") != "" { + skipRefs, err = strconv.ParseBool(r.URL.Query().Get("skipRefs")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "skipRefs", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + versionDocumentsFilterReq := view.DocumentsFilterReq{ + Limit: limit, + Offset: limit * page, + TextFilter: textFilter, + ApiType: apiType, + } + + documents, err := v.versionService.GetLatestDocuments(packageId, versionName, skipRefs, versionDocumentsFilterReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version documents", err) + return + } + RespondWithJson(w, http.StatusOK, documents) +} + +func (v versionControllerImpl) DeleteVersion(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + 
versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + versionStatus, err := v.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges(get version status)", err) + return + } + sufficientPrivileges, err := v.roleService.HasManageVersionPermission(ctx, packageId, versionStatus) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err = v.versionService.DeleteVersion(ctx, packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to delete package version", err) + return + } + w.WriteHeader(http.StatusNoContent) +} + +func (v versionControllerImpl) PatchVersion(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: 
exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.VersionPatchRequest + err = json.Unmarshal(body, &req) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + if req.Status == nil && req.VersionLabels == nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: "All patch parameters are null which is not allowed", + }) + return + } + + statuses := make([]string, 0) + if req.Status != nil { + _, err := view.ParseVersionStatus(*req.Status) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: err.Error(), + }) + return + } + statuses = append(statuses, *req.Status) + } + + if req.VersionLabels != nil { + versionStatus, err := v.versionService.GetVersionStatus(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges (get version status)", err) + return + } + statuses = append(statuses, versionStatus) + } + sufficientPrivileges, err := v.roleService.HasManageVersionPermission(ctx, packageId, statuses...) 
+ if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + content, err := v.versionService.PatchVersion(context.Create(r), packageId, versionName, req.Status, req.VersionLabels) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to patch version", err) + return + } + + RespondWithJson(w, http.StatusOK, content) +} + +func (v versionControllerImpl) GetPackageVersionsList_deprecated(w http.ResponseWriter, r *http.Request) { + var err error + + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + status, err := url.QueryUnescape(r.URL.Query().Get("status")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "status"}, + Debug: err.Error(), + }) + return + } + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + versionLabel, err := url.QueryUnescape(r.URL.Query().Get("versionLabel")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "versionLabel"}, + Debug: err.Error(), + }) + return + } + + checkRevisions := false + if r.URL.Query().Get("checkRevisions") != "" { + checkRevisions, err = strconv.ParseBool(r.URL.Query().Get("checkRevisions")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "checkRevisions", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + sortBy := r.URL.Query().Get("sortBy") + if sortBy == "" { + sortBy = view.VersionSortByVersion + } + sortOrder := r.URL.Query().Get("sortOrder") + if sortOrder == "" { + sortOrder = view.VersionSortOrderDesc + } + + versionListReq := view.VersionListReq{ + PackageId: packageId, + Status: status, + Limit: limit, + Page: page, + TextFilter: textFilter, + SortBy: sortBy, + SortOrder: sortOrder, + Label: versionLabel, + CheckRevisions: checkRevisions, + } + + versions, err := v.versionService.GetPackageVersionsView_deprecated(versionListReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, 
v.ptHandler, packageId, "Failed to get package versions", err) + return + } + RespondWithJson(w, http.StatusOK, versions) +} +func (v versionControllerImpl) GetPackageVersionsList(w http.ResponseWriter, r *http.Request) { + var err error + + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + status := r.URL.Query().Get("status") + + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + + textFilter := r.URL.Query().Get("textFilter") + versionLabel := r.URL.Query().Get("versionLabel") + sortBy := r.URL.Query().Get("sortBy") + if sortBy == "" { + sortBy = view.VersionSortByVersion + } + sortOrder := r.URL.Query().Get("sortOrder") + if sortOrder == "" { + sortOrder = view.VersionSortOrderDesc + } + + checkRevisions := false + if r.URL.Query().Get("checkRevisions") != "" { + checkRevisions, err = strconv.ParseBool(r.URL.Query().Get("checkRevisions")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: 
exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "checkRevisions", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + versionListReq := view.VersionListReq{ + PackageId: packageId, + Status: status, + Limit: limit, + Page: page, + TextFilter: textFilter, + Label: versionLabel, + CheckRevisions: checkRevisions, + SortBy: sortBy, + SortOrder: sortOrder, + } + + versions, err := v.versionService.GetPackageVersionsView(versionListReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get package versions", err) + return + } + RespondWithJson(w, http.StatusOK, versions) +} + +func (v versionControllerImpl) GetPackageVersionContent_deprecated(w http.ResponseWriter, r *http.Request) { + var err error + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + includeSummary := false + if r.URL.Query().Get("includeSummary") != "" { + includeSummary, err = strconv.ParseBool(r.URL.Query().Get("includeSummary")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: 
exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeSummary", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + includeOperations := false + if r.URL.Query().Get("includeOperations") != "" { + includeOperations, err = strconv.ParseBool(r.URL.Query().Get("includeOperations")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeOperations", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + includeGroups := false + if r.URL.Query().Get("includeGroups") != "" { + includeGroups, err = strconv.ParseBool(r.URL.Query().Get("includeGroups")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeGroups", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + v.monitoringService.AddVersionOpenCount(packageId, version) + + content, err := v.versionService.GetPackageVersionContent_deprecated(packageId, version, includeSummary, includeOperations, includeGroups) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get package version content", err) + return + } + + RespondWithJson(w, http.StatusOK, content) +} + +func (v versionControllerImpl) GetPackageVersionContent(w http.ResponseWriter, r *http.Request) { + var err error + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + includeSummary := false + if r.URL.Query().Get("includeSummary") != "" { + includeSummary, err = strconv.ParseBool(r.URL.Query().Get("includeSummary")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeSummary", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + includeOperations := false + if r.URL.Query().Get("includeOperations") != "" { + includeOperations, err = strconv.ParseBool(r.URL.Query().Get("includeOperations")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeOperations", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + includeGroups := false + if r.URL.Query().Get("includeGroups") != "" { + includeGroups, err = strconv.ParseBool(r.URL.Query().Get("includeGroups")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeGroups", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + v.monitoringService.AddVersionOpenCount(packageId, version) + + content, err := 
v.versionService.GetPackageVersionContent(packageId, version, includeSummary, includeOperations, includeGroups) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get package version content", err) + return + } + + RespondWithJson(w, http.StatusOK, content) +} + +func (v versionControllerImpl) GetVersionedContentFileRaw(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + slug := getStringParam(r, "slug") + + v.monitoringService.AddDocumentOpenCount(packageId, versionName, slug) + v.monitoringService.IncreaseBusinessMetricCounter(ctx.GetUserId(), metrics.DocumentsCalled, packageId) + + _, contentData, err := v.versionService.GetLatestContentDataBySlug(packageId, versionName, slug) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get published content", err) + return + } + w.Header().Set("Content-Type", contentData.DataType) + w.WriteHeader(http.StatusOK) + w.Write(contentData.Data) +} + +func (v versionControllerImpl) GetVersionChanges(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + 
ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + changes, err := v.versionService.GetVersionValidationChanges(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version changes", err) + return + } + + RespondWithJson(w, http.StatusOK, changes) +} + +func (v versionControllerImpl) GetVersionProblems(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: 
map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + problems, err := v.versionService.GetVersionValidationProblems(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version problems", err) + return + } + + RespondWithJson(w, http.StatusOK, problems) +} + +// deprecated +func (v versionControllerImpl) GetVersionReferences(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + kind, err := url.QueryUnescape(r.URL.Query().Get("kind")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "kind"}, + Debug: err.Error(), + }) + return + } + showAllDescendants := false + if r.URL.Query().Get("showAllDescendants") != "" { + showAllDescendants, err = strconv.ParseBool(r.URL.Query().Get("showAllDescendants")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "showAllDescendants", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + + versionReferencesFilterReq := view.VersionReferencesReq{ + Limit: limit, + Page: page, + TextFilter: textFilter, + Kind: kind, + ShowAllDescendants: showAllDescendants, + } + + references, err := v.versionService.GetVersionReferences(packageId, versionName, versionReferencesFilterReq) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version references", err) + return + } + RespondWithJson(w, http.StatusOK, references) +} + +func (v versionControllerImpl) GetVersionReferencesV3(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + 
RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + + references, err := v.versionService.GetVersionReferencesV3(packageId, versionName) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version references", err) + return + } + RespondWithJson(w, http.StatusOK, references) +} + +func (v versionControllerImpl) GetVersionRevisionsList_deprecated(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = 
strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + pagingFilter := view.PagingFilterReq{ + TextFilter: textFilter, + Limit: limit, + Offset: limit * page, + } + versionRevisionsList, err := v.versionService.GetVersionRevisionsList_deprecated(packageId, versionName, pagingFilter) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version revisions list", err) + return + } + RespondWithJson(w, http.StatusOK, versionRevisionsList) +} +func (v versionControllerImpl) GetVersionRevisionsList(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + versionName, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, 
+ Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + limit, customError := getLimitQueryParam(r) + if customError != nil { + RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + textFilter, err := url.QueryUnescape(r.URL.Query().Get("textFilter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "textFilter"}, + Debug: err.Error(), + }) + return + } + + pagingFilter := view.PagingFilterReq{ + TextFilter: textFilter, + Limit: limit, + Offset: limit * page, + } + versionRevisionsList, err := v.versionService.GetVersionRevisionsList(packageId, versionName, pagingFilter) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to get version revisions list", err) + return + } + RespondWithJson(w, http.StatusOK, versionRevisionsList) +} + +func (v versionControllerImpl) DeleteVersionsRecursively(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ManageDraftVersionPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: 
exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.DeleteVersionsRecursivelyReq + err = json.Unmarshal(body, &req) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + id, err := v.versionService.DeleteVersionsRecursively(ctx, packageId, req.OlderThanDate) + if err != nil { + RespondWithError(w, "failed to cleanup old versions", err) + return + } + RespondWithJson(w, http.StatusOK, map[string]string{"jobId": id}) +} + +func (v versionControllerImpl) CopyVersion(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + version, err := getUnescapedStringParam(r, "version") + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidURLEscape, + Message: exception.InvalidURLEscapeMsg, + Params: map[string]interface{}{"param": "version"}, + Debug: err.Error(), + }) + return + } + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := io.ReadAll(r.Body) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.CopyVersionReq + err = json.Unmarshal(body, &req) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + validationErr := utils.ValidateObject(req) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + _, err = view.ParseVersionStatus(req.TargetStatus) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: err.Error(), + }) + return + } + sufficientPrivileges, err = v.roleService.HasManageVersionPermission(ctx, req.TargetPackageId, req.TargetStatus) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishId, err := v.versionService.CopyVersion(ctx, packageId, version, req) + if err != nil { + RespondWithError(w, "Failed to copy published version", err) + return + } + RespondWithJson(w, http.StatusAccepted, view.CopyVersionResp{PublishId: publishId}) +} + +func (v versionControllerImpl) GetPublishedVersionsHistory(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + if !v.isSysadm(ctx) { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + var err error + limit, customError := getLimitQueryParam(r) + if customError != nil { + 
RespondWithCustomError(w, customError) + return + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error(), + }) + return + } + } + filter := view.PublishedVersionHistoryFilter{ + Limit: limit, + Page: page, + } + if r.URL.Query().Get("publishedBefore") != "" { + publishedBefore, err := time.Parse(time.RFC3339, r.URL.Query().Get("publishedBefore")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "publishedBefore", "type": "time"}, + Debug: err.Error(), + }) + return + } + filter.PublishedBefore = &publishedBefore + } + if r.URL.Query().Get("publishedAfter") != "" { + publishedAfter, err := time.Parse(time.RFC3339, r.URL.Query().Get("publishedAfter")) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "publishedAfter", "type": "time"}, + Debug: err.Error(), + }) + return + } + filter.PublishedAfter = &publishedAfter + } + status := r.URL.Query().Get("status") + if status != "" { + filter.Status = &status + } + + history, err := v.versionService.GetPublishedVersionsHistory(filter) + if err != nil { + RespondWithError(w, "Failed to get published versions history", err) + return + } + RespondWithJson(w, http.StatusOK, history) +} + +func (v versionControllerImpl) PublishFromCSV(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + ctx := context.Create(r) + 
sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + handlePkgRedirectOrRespondWithError(w, r, v.ptHandler, packageId, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + err = r.ParseMultipartForm(0) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + defer func() { + err := r.MultipartForm.RemoveAll() + if err != nil { + log.Debugf("failed to remove temporary data: %+v", err) + } + }() + csvPublishReq := view.PublishFromCSVReq{} + csvPublishReq.PackageId = packageId + csvPublishReq.Version = r.FormValue("version") + csvPublishReq.ServicesWorkspaceId = r.FormValue("servicesWorkspaceId") + csvPublishReq.PreviousVersion = r.FormValue("previousVersion") + csvPublishReq.PreviousVersionPackageId = r.FormValue("previousVersionPackageId") + csvPublishReq.Status = r.FormValue("status") + versionLabelsArrStr := r.FormValue("versionLabels") + if versionLabelsArrStr != "" { + err = json.Unmarshal([]byte(versionLabelsArrStr), &csvPublishReq.VersionLabels) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: fmt.Sprintf("failed to unmarshal versionLabels field: %v", err.Error()), + }) + return + } + } + csvFile, _, err := r.FormFile("csvFile") + if err != http.ErrMissingFile { + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + 
return + } + csvData, err := io.ReadAll(csvFile) + closeErr := csvFile.Close() + if closeErr != nil { + log.Errorf("failed to close temporary file: %+v", err) + } + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMultipartFile, + Message: exception.IncorrectMultipartFileMsg, + Debug: err.Error()}) + return + } + csvPublishReq.CSVData = csvData + } else if r.FormValue("csvFile") != "" { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidMultipartFileType, + Message: exception.InvalidMultipartFileTypeMsg, + Params: map[string]interface{}{"field": "csvFile"}, + }) + return + } + validationErr := utils.ValidateObject(csvPublishReq) + if validationErr != nil { + if customError, ok := validationErr.(*exception.CustomError); ok { + RespondWithCustomError(w, customError) + return + } + } + + _, err = view.ParseVersionStatus(csvPublishReq.Status) + if err != nil { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameter, + Message: err.Error(), + }) + return + } + sufficientPrivileges, err = v.roleService.HasManageVersionPermission(ctx, csvPublishReq.PackageId, csvPublishReq.Status) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishId, err := v.versionService.StartPublishFromCSV(ctx, csvPublishReq) + if err != nil { + RespondWithError(w, "Failed to start dashboard publish from csv", err) + return + } + RespondWithJson(w, http.StatusAccepted, view.PublishFromCSVResp{PublishId: publishId}) +} + +func (v versionControllerImpl) GetCSVDashboardPublishStatus(w http.ResponseWriter, r *http.Request) { + packageId 
:= getStringParam(r, "packageId") + publishId := getStringParam(r, "publishId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishStatus, err := v.versionService.GetCSVDashboardPublishStatus(publishId) + if err != nil { + RespondWithError(w, "Failed to get publish status", err) + return + } + RespondWithJson(w, http.StatusOK, publishStatus) +} + +func (v versionControllerImpl) GetCSVDashboardPublishReport(w http.ResponseWriter, r *http.Request) { + packageId := getStringParam(r, "packageId") + publishId := getStringParam(r, "publishId") + ctx := context.Create(r) + sufficientPrivileges, err := v.roleService.HasRequiredPermissions(ctx, packageId, view.ReadPermission) + if err != nil { + RespondWithError(w, "Failed to check user privileges", err) + return + } + if !sufficientPrivileges { + RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + publishReport, err := v.versionService.GetCSVDashboardPublishReport(publishId) + if err != nil { + RespondWithError(w, "Failed to get publish report", err) + return + } + w.Header().Set("Content-Type", "text/csv") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=publish_report_%v.csv", time.Now().Format("2006-01-02 15-04-05"))) + w.Header().Set("Expires", "0") + w.WriteHeader(http.StatusOK) + w.Write(publishReport) +} diff --git a/qubership-apihub-service/crypto/Hash.go b/qubership-apihub-service/crypto/Hash.go new file mode 100644 index 0000000..5f0a35d --- /dev/null 
+++ b/qubership-apihub-service/crypto/Hash.go @@ -0,0 +1,32 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package crypto + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/hex" +) + +func CreateRandomHash() string { + bytes := make([]byte, 32) //32 symbols + rand.Read(bytes) + return hex.EncodeToString(bytes[:]) +} + +func CreateSHA256Hash(data []byte) string { + hash := sha256.Sum256(data) + return hex.EncodeToString(hash[:]) +} diff --git a/qubership-apihub-service/db/ConnectionProvider.go b/qubership-apihub-service/db/ConnectionProvider.go new file mode 100644 index 0000000..7f0ce62 --- /dev/null +++ b/qubership-apihub-service/db/ConnectionProvider.go @@ -0,0 +1,90 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package db + +import ( + "fmt" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-pg/pg/v10" +) + +type ConnectionProvider interface { + GetConnection() *pg.DB +} + +type connectionProviderImpl struct { + creds view.DbCredentials + db *pg.DB +} + +func NewConnectionProvider(creds *view.DbCredentials) ConnectionProvider { + return &connectionProviderImpl{creds: *creds} +} + +func (c *connectionProviderImpl) GetConnection() *pg.DB { + if c.db == nil { + c.db = pg.Connect(&pg.Options{ + Addr: fmt.Sprintf("%s:%d", c.creds.Host, c.creds.Port), + User: c.creds.Username, + Password: c.creds.Password, + Database: c.creds.Database, + PoolSize: 50, + MaxRetries: 5, + }) + } + //c.db.AddQueryHook(&dbLogger{cpi: c}) + return c.db +} + +/* +type dbLogger struct { + cpi *connectionProviderImpl +} + +func (d dbLogger) BeforeQuery(ctx context.Context, q *pg.QueryEvent) (context.Context, error) { + return ctx, nil +} + +func (d dbLogger) AfterQuery(ctx context.Context, q *pg.QueryEvent) error { + if query, _ := q.FormattedQuery(); bytes.Compare(query, []byte("SELECT 1")) != 0 { + log.Trace(string(query)) + } + + if q.Err != nil && strings.Contains(q.Err.Error(), "Conn is in a bad state") { + if d.cpi != nil { + if d.cpi.conn != nil { + err := d.cpi.conn.Close() + if err != nil { + log.Errorf("Failed to close conn for bad state: %s", err) + } + } + if d.cpi.db != nil { + err := d.cpi.db.Close() + if err != nil { + log.Errorf("Failed to close DB for bad state: %s", err) + } + } + } + d.cpi.db = nil + d.cpi.conn = nil + } + + // for dev purposes + //queryStr, _ := q.FormattedQuery() + //log.Infof("DB query: %s", queryStr) + + return nil +}*/ diff --git a/qubership-apihub-service/entity/ActivityTrackingEntity.go b/qubership-apihub-service/entity/ActivityTrackingEntity.go new file mode 100644 index 0000000..2af4f54 --- /dev/null +++ b/qubership-apihub-service/entity/ActivityTrackingEntity.go @@ -0,0 +1,94 @@ +// Copyright 2024-2025 
NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" +) + +type ActivityTrackingEntity struct { + tableName struct{} `pg:"activity_tracking"` + + Id string `pg:"id, pk, type:varchar"` + Type string `pg:"e_type, type:varchar"` + Data map[string]interface{} `pg:"data, type:jsonb"` + PackageId string `pg:"package_id, type:varchar"` + Date time.Time `pg:"date, type:varchar"` + UserId string `pg:"user_id, type:timestamp without time zone"` +} + +type EnrichedActivityTrackingEntity_deprecated struct { + tableName struct{} `pg:"select:activity_tracking,alias:at"` + + ActivityTrackingEntity + PackageName string `pg:"pkg_name, type:varchar"` + PackageKind string `pg:"pkg_kind, type:varchar"` + UserName string `pg:"usr_name, type:varchar"` + NotLatestRevision bool `pg:"not_latest_revision, type:bool"` +} + +type EnrichedActivityTrackingEntity struct { + tableName struct{} `pg:"select:activity_tracking,alias:at"` + + ActivityTrackingEntity + PrincipalEntity + PackageName string `pg:"pkg_name, type:varchar"` + PackageKind string `pg:"pkg_kind, type:varchar"` + NotLatestRevision bool `pg:"not_latest_revision, type:bool"` +} + +func MakeActivityTrackingEventEntity(event view.ActivityTrackingEvent) ActivityTrackingEntity { + return ActivityTrackingEntity{ + Id: uuid.New().String(), + Type: 
string(event.Type), + Data: event.Data, + PackageId: event.PackageId, + Date: event.Date, + UserId: event.UserId, + } +} + +func MakeActivityTrackingEventView_depracated(ent EnrichedActivityTrackingEntity_deprecated) view.PkgActivityResponseItem_depracated { + return view.PkgActivityResponseItem_depracated{ + PackageName: ent.PackageName, + PackageKind: ent.PackageKind, + UserName: ent.UserName, + ActivityTrackingEvent: view.ActivityTrackingEvent{ + Type: view.ATEventType(ent.Type), + Data: ent.Data, + PackageId: ent.PackageId, + Date: ent.Date, + UserId: ent.UserId, + }, + } + +} +func MakeActivityTrackingEventView(ent EnrichedActivityTrackingEntity) view.PkgActivityResponseItem { + return view.PkgActivityResponseItem{ + PackageName: ent.PackageName, + PackageKind: ent.PackageKind, + Principal: *MakePrincipalView(&ent.PrincipalEntity), + ActivityTrackingEvent: view.ActivityTrackingEvent{ + Type: view.ATEventType(ent.Type), + Data: ent.Data, + PackageId: ent.PackageId, + Date: ent.Date, + UserId: ent.UserId, + }, + } +} diff --git a/qubership-apihub-service/entity/AgentEntity.go b/qubership-apihub-service/entity/AgentEntity.go new file mode 100644 index 0000000..e1736ff --- /dev/null +++ b/qubership-apihub-service/entity/AgentEntity.go @@ -0,0 +1,57 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type AgentEntity struct { + tableName struct{} `pg:"agent"` + + AgentId string `pg:"agent_id, pk, type:varchar"` + Cloud string `pg:"cloud, type:varchar"` + Namespace string `pg:"namespace, type:varchar"` + Url string `pg:"url, type:varchar"` + BackendVersion string `pg:"backend_version, type:varchar"` + LastActive time.Time `pg:"last_active, type:timestamp without time zone"` + Name string `pg:"name, type:varchar"` + AgentVersion string `pg:"agent_version, type:varchar"` +} + +func MakeAgentView(ent AgentEntity) view.AgentInstance { + status := view.AgentStatusActive + if time.Since(ent.LastActive) > time.Second*60 { + status = view.AgentStatusInactive + } + name := ent.Name + if name == "" { + name = ent.Namespace + "." + ent.Cloud + } + + return view.AgentInstance{ + AgentId: ent.AgentId, + AgentDeploymentCloud: ent.Cloud, + AgentDeploymentNamespace: ent.Namespace, + AgentUrl: ent.Url, + LastActive: ent.LastActive, + Status: status, + BackendVersion: ent.BackendVersion, + Name: name, + AgentVersion: ent.AgentVersion, + } +} diff --git a/qubership-apihub-service/entity/ApiKeyEntity.go b/qubership-apihub-service/entity/ApiKeyEntity.go new file mode 100644 index 0000000..282ea18 --- /dev/null +++ b/qubership-apihub-service/entity/ApiKeyEntity.go @@ -0,0 +1,34 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and
// limitations under the License.

package entity

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// ApiKeyEntity maps the "user_integration" table: one row per user and
// external git integration type, holding the tokens used to access it.
type ApiKeyEntity struct {
	tableName struct{} `pg:"user_integration"`

	// Integration and UserId form the composite primary key.
	Integration view.GitIntegrationType `pg:"integration_type, pk, type:varchar"`
	UserId      string                  `pg:"user_id, pk, type:varchar"`
	// AccessToken is stored in the "key" column.
	AccessToken  string `pg:"key, type:varchar"`
	RefreshToken string `pg:"refresh_token, type:varchar"`
	// FailedRefreshAttempts uses use_zero so 0 is written explicitly.
	FailedRefreshAttempts int       `pg:"failed_refresh_attempts, type:integer, use_zero"`
	ExpiresAt             time.Time `pg:"expires_at, type:timestamp without time zone"`
	RedirectUri           string    `pg:"redirect_uri, type:varchar"`
	IsRevoked             bool      `pg:"is_revoked, type:boolean, use_zero"`
}

// ---- file: ApihubApiKeyEntity.go (Apache-2.0 license header as in sibling files) ----
package entity

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// ApihubApiKeyEntity_deprecated maps the "apihub_api_keys" table.
//
// Deprecated: superseded by ApihubApiKeyEntity, which adds the
// created_for column.
type ApihubApiKeyEntity_deprecated struct {
	tableName struct{} `pg:"apihub_api_keys"`

	Id        string     `pg:"id, pk, type:varchar"`
	PackageId string     `pg:"package_id, type:varchar"`
	Name      string     `pg:"name, type:varchar"`
	CreatedBy string     `pg:"created_by, type:varchar"`
	CreatedAt time.Time  `pg:"created_at, type:timestamp without time zone"`
	DeletedBy string     `pg:"deleted_by, type:varchar"`
	DeletedAt *time.Time `pg:"deleted_at, type:timestamp without time zone"`
	ApiKey    string     `pg:"api_key, type:varchar"` // hash
	Roles     []string   `pg:"roles, type:varchar array, array"`
}

// ApihubApiKeyEntity maps the "apihub_api_keys" table, including the
// user the key was created for (created_for).
type ApihubApiKeyEntity struct {
	tableName struct{} `pg:"apihub_api_keys"`

	Id         string     `pg:"id, pk, type:varchar"`
	PackageId  string     `pg:"package_id, type:varchar"`
	Name       string     `pg:"name, type:varchar"`
	CreatedBy  string     `pg:"created_by, type:varchar"`
	CreatedFor string     `pg:"created_for, type:varchar"`
	CreatedAt  time.Time  `pg:"created_at, type:timestamp without time zone"`
	DeletedBy  string     `pg:"deleted_by, type:varchar"`
	DeletedAt  *time.Time `pg:"deleted_at, type:timestamp without time zone"`
	ApiKey     string     `pg:"api_key, type:varchar"` // hash
	Roles      []string   `pg:"roles, type:varchar array, array"`
}

// ApihubApiKeyUserEntity_deprecated is ApihubApiKeyEntity_deprecated
// joined with the creator's user details.
//
// Deprecated: superseded by ApihubApiKeyUserEntity.
type ApihubApiKeyUserEntity_deprecated struct {
	tableName struct{} `pg:"apihub_api_keys, alias:apihub_api_keys"`

	ApihubApiKeyEntity_deprecated
	UserName      string `pg:"user_name, type:varchar"`
	UserEmail     string `pg:"user_email, type:varchar"`
	UserAvatarUrl string `pg:"user_avatar_url, type:varchar"`
}

// ApihubApiKeyUserEntity is ApihubApiKeyEntity joined with the user
// details of both the creator and the "created for" user.
type ApihubApiKeyUserEntity struct {
	tableName struct{} `pg:"apihub_api_keys, alias:apihub_api_keys"`

	ApihubApiKeyEntity
	UserName                string `pg:"user_name, type:varchar"`
	UserEmail               string `pg:"user_email, type:varchar"`
	UserAvatarUrl           string `pg:"user_avatar_url, type:varchar"`
	CreatedForUserName      string `pg:"created_for_user_name, type:varchar"`
	CreatedForUserEmail     string `pg:"created_for_user_email, type:varchar"`
	CreatedForUserAvatarUrl string `pg:"created_for_user_avatar_url, type:varchar"`
}

// MakeApihubApiKeyView_deprecated converts the entity to its API view.
// The hash (ApiKey) is intentionally not exposed.
//
// Deprecated: kept for the older API version; see MakeApihubApiKeyView.
func MakeApihubApiKeyView_deprecated(entity ApihubApiKeyEntity_deprecated) *view.ApihubApiKey_deprecated {
	return &view.ApihubApiKey_deprecated{
		Id:        entity.Id,
		PackageId: entity.PackageId,
		Name:      entity.Name,
		CreatedBy: entity.CreatedBy,
		CreatedAt: entity.CreatedAt,
		DeletedBy: entity.DeletedBy,
		DeletedAt: entity.DeletedAt,
		Roles:     entity.Roles,
	}
}

// MakeApihubApiKeyView_v3_deprecated converts the joined entity to the
// v3 API view, expanding CreatedBy into a full user object.
//
// Deprecated: kept for the v3 API; see MakeApihubApiKeyView.
func MakeApihubApiKeyView_v3_deprecated(entity ApihubApiKeyUserEntity_deprecated) *view.ApihubApiKey_v3_deprecated {
	return &view.ApihubApiKey_v3_deprecated{
		Id:        entity.Id,
		PackageId: entity.PackageId,
		Name:      entity.Name,
		CreatedBy: view.User{
			Id:        entity.CreatedBy,
			Name:      entity.UserName,
			Email:     entity.UserEmail,
			AvatarUrl: entity.UserAvatarUrl,
		},
		CreatedAt: entity.CreatedAt,
		DeletedBy: entity.DeletedBy,
		DeletedAt: entity.DeletedAt,
		Roles:     entity.Roles,
	}
}

// MakeApihubApiKeyView converts the joined entity to the current API
// view, expanding both CreatedBy and CreatedFor into user objects.
func MakeApihubApiKeyView(entity ApihubApiKeyUserEntity) *view.ApihubApiKey {
	return &view.ApihubApiKey{
		Id:        entity.Id,
		PackageId: entity.PackageId,
		Name:      entity.Name,
		CreatedBy: view.User{
			Id:        entity.CreatedBy,
			Name:      entity.UserName,
			Email:     entity.UserEmail,
			AvatarUrl: entity.UserAvatarUrl,
		},
		CreatedFor: &view.User{
			Id:        entity.CreatedFor,
			Name:      entity.CreatedForUserName,
			Email:     entity.CreatedForUserEmail,
			AvatarUrl: entity.CreatedForUserAvatarUrl,
		},
		CreatedAt: entity.CreatedAt,
		DeletedBy: entity.DeletedBy,
		DeletedAt: entity.DeletedAt,
		Roles:     entity.Roles,
	}
}

// MakeApihubApiKeyEntity_deprecated builds a storable entity from the
// deprecated view plus the already-hashed apiKey value.
//
// Deprecated: see MakeApihubApiKeyEntity.
func MakeApihubApiKeyEntity_deprecated(apihubApiKeyView view.ApihubApiKey_deprecated, apiKey string) *ApihubApiKeyEntity_deprecated {
	return &ApihubApiKeyEntity_deprecated{
		Id:        apihubApiKeyView.Id,
		PackageId: apihubApiKeyView.PackageId,
		Name:      apihubApiKeyView.Name,
		CreatedBy: apihubApiKeyView.CreatedBy,
		CreatedAt: apihubApiKeyView.CreatedAt,
		DeletedBy: apihubApiKeyView.DeletedBy,
		DeletedAt: apihubApiKeyView.DeletedAt,
		ApiKey:    apiKey,
		Roles:     apihubApiKeyView.Roles,
	}
}

// MakeApihubApiKeyEntity builds a storable entity from the current view
// plus the already-hashed apiKey value. A nil CreatedFor user is stored
// as an empty created_for id.
func MakeApihubApiKeyEntity(apihubApiKeyView view.ApihubApiKey, apiKey string) *ApihubApiKeyEntity {
	createdForId := ""
	if apihubApiKeyView.CreatedFor != nil {
		createdForId = apihubApiKeyView.CreatedFor.Id
	}
	return &ApihubApiKeyEntity{
		Id:         apihubApiKeyView.Id,
		PackageId:  apihubApiKeyView.PackageId,
		Name:       apihubApiKeyView.Name,
		CreatedBy:  apihubApiKeyView.CreatedBy.Id,
		CreatedFor: createdForId,
		CreatedAt:  apihubApiKeyView.CreatedAt,
		DeletedBy:  apihubApiKeyView.DeletedBy,
		DeletedAt:  apihubApiKeyView.DeletedAt,
		ApiKey:     apiKey,
		Roles:      apihubApiKeyView.Roles,
	}
}

// ---- file: BranchDraftEntity.go (Apache-2.0 license header as in sibling files) ----
package entity

// BranchDraftEntity maps the "drafted_branches" table: the draft state
// of one git branch of a project.
type BranchDraftEntity struct {
	tableName struct{} `pg:"drafted_branches"`

	// ProjectId and BranchName form the composite primary key.
	ProjectId  string `pg:"project_id, pk, type:varchar"`
	BranchName string `pg:"branch_name, pk, type:varchar"`
	ChangeType string `pg:"change_type, type:varchar"`
	// OriginalConfig holds the branch config as raw bytes.
	OriginalConfig []byte   `pg:"original_config, type:bytea"`
	Editors        []string `pg:"editors, array, type:varchar[]"`
	CommitId       string   `pg:"commit_id, type:varchar"`
}

// ---- file: BuildCleanupEntity.go (Apache-2.0 license header as in sibling files) ----
package entity

import "time"

// BuildCleanupEntity maps the "build_cleanup_run" table: the result of
// one build-cleanup run, with per-table deleted-row counters.
type BuildCleanupEntity struct {
	tableName struct{} `pg:"build_cleanup_run"`

	RunId       int       `pg:"run_id, pk, type:integer"`
	DeletedRows int       `pg:"deleted_rows, type:integer"`
	ScheduledAt time.Time `pg:"scheduled_at, type:timestamp without time zone"`

	// Per-table counters of rows removed during the run.
	BuildResult         int `pg:"build_result, type:integer"`
	BuildSrc            int `pg:"build_src, type:integer"`
	OperationData       int `pg:"operation_data, type:integer"`
	TsOperationData     int `pg:"ts_operation_data, type:integer"`
	TsRestOperationData int `pg:"ts_rest_operation_data, type:integer"`
	TsGQLOperationData  int `pg:"ts_gql_operation_data, type:integer"`
}

// BuildIdEntity is a projection of the "build" table holding only the
// build id column.
type BuildIdEntity struct {
	tableName struct{} `pg:"build"`

	Id string `pg:"build_id, type:varchar"`
}

// ---- file: BuildEntity.go (Apache-2.0 license header as in sibling files) ----
+ +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type BuildEntity struct { + tableName struct{} `pg:"build"` + + BuildId string `pg:"build_id, pk, type:varchar"` + Status string `pg:"status, type:varchar"` + Details string `pg:"details, type:varchar"` + ClientBuild bool `pg:"client_build, type:boolean, use_zero"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + + CreatedAt *time.Time `pg:"created_at, type:timestamp without time zone, default:now()"` + LastActive *time.Time `pg:"last_active, type:timestamp without time zone, default:now()"` + CreatedBy string `pg:"created_by, type:varchar"` + + StartedAt *time.Time `pg:"started_at, type:timestamp without time zone"` + + RestartCount int `pg:"restart_count, type:integer, use_zero"` + + BuilderId string `pg:"builder_id, type:varchar"` + Priority int `pg:"priority, type:integer, use_zero"` + Metadata map[string]interface{} `pg:"metadata, type:jsonb"` +} + +type BuildSourceEntity struct { + tableName struct{} `pg:"build_src"` + + BuildId string `pg:"build_id, pk, type:varchar"` + Source []byte `pg:"source, type:bytea"` + Config map[string]interface{} `pg:"config, type:jsonb"` +} + +type BuildDependencyEntity struct { + tableName struct{} `pg:"build_depends"` + + BuildId string `pg:"build_id, type:varchar"` + DependId string `pg:"depend_id, type:varchar"` +} + +type ChangelogBuildSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + PreviousVersionPackageId string `pg:"previous_version_package_id, type:varchar, use_zero"` + PreviousVersion string `pg:"previous_version, type:varchar, use_zero"` + BuildType string `pg:"build_type, type:varchar, use_zero"` + ComparisonRevision int `pg:"comparison_revision, type:integer, use_zero"` + ComparisonPrevRevision int `pg:"comparison_prev_revision, type:integer, 
use_zero"` +} + +type DocumentGroupBuildSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + BuildType string `pg:"build_type, type:varchar, use_zero"` + Format string `pg:"format, type:varchar, use_zero"` + ApiType string `pg:"api_type, type:varchar, use_zero"` + GroupName string `pg:"group_name, type:varchar, use_zero"` +} + +func MakeBuildView(buildEnt *BuildEntity) *view.BuildView { + return &view.BuildView{ + PackageId: buildEnt.PackageId, + Version: buildEnt.Version, + BuildId: buildEnt.BuildId, + Status: buildEnt.Status, + Details: buildEnt.Details, + CreatedAt: *buildEnt.CreatedAt, + LastActive: *buildEnt.LastActive, + CreatedBy: buildEnt.CreatedBy, + RestartCount: buildEnt.RestartCount, + } +} diff --git a/qubership-apihub-service/entity/BuildResultEntity.go b/qubership-apihub-service/entity/BuildResultEntity.go new file mode 100644 index 0000000..dcd211f --- /dev/null +++ b/qubership-apihub-service/entity/BuildResultEntity.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package entity

// BuildResultEntity maps the "build_result" table: the raw result
// archive produced by a build.
type BuildResultEntity struct {
	tableName struct{} `pg:"build_result"`

	BuildId string `pg:"build_id, pk, type:varchar"`
	Data    []byte `pg:"data, type:bytea"`
}

// ---- file: BuilderNotificationsEntity.go (Apache-2.0 license header as in sibling files) ----

package entity

// BuilderNotificationsEntity maps the "builder_notifications" table:
// a message emitted by the builder for a build, optionally tied to a file.
type BuilderNotificationsEntity struct {
	tableName struct{} `pg:"builder_notifications"`

	BuildId  string `pg:"build_id, type:varchar"`
	Severity int    `pg:"severity, type:integer"`
	Message  string `pg:"message, type:varchar"`
	FileId   string `pg:"file_id, type:varchar"`
}

// ---- file: BusinessMetric.go (Apache-2.0 license header as in sibling files) ----
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package entity

import "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"

// BusinessMetricEntity is one aggregated business-metric row: a metric
// value for a given date, package and username.
type BusinessMetricEntity struct {
	Date      string `pg:"date, type:varchar"`
	PackageId string `pg:"package_id, type:varchar"`
	Metric    string `pg:"metric, type:varchar"`
	Username  string `pg:"username, type:varchar"`
	Value     int    `pg:"value, type:integer"`
}

// MakeBusinessMetricView converts a BusinessMetricEntity into its API
// representation (a straight field-by-field copy).
func MakeBusinessMetricView(ent BusinessMetricEntity) view.BusinessMetric {
	return view.BusinessMetric{
		Date:      ent.Date,
		PackageId: ent.PackageId,
		Username:  ent.Username,
		Metric:    ent.Metric,
		Value:     ent.Value,
	}
}

// ---- file: Changes.go (Apache-2.0 license header as in sibling files) ----
+ +package entity + +import ( + "encoding/json" + "reflect" +) + +func (s Metadata) GetChanges(t Metadata) map[string]interface{} { + changes := make(map[string]interface{}, 0) + for key, sVal := range s { + if key == BUILDER_VERSION_KEY { + continue + } + tVal, exists := t[key] + if !exists { + changes[key] = map[string]interface{}{ + "old": sVal, + } + continue + } + //custom []string array handling + if sArrVal, ok := sVal.([]interface{}); ok { + equal := true + if tArrVal, ok := tVal.([]string); ok { + if len(sArrVal) == len(tArrVal) { + for i, sEl := range sArrVal { + if sStrVal, isStr := sEl.(string); isStr { + if tArrVal[i] != sStrVal { + equal = false + break + } + } else { + equal = false + break + } + } + } else { + equal = false + } + } else { + equal = false + } + if equal { + continue + } + } + if !reflect.DeepEqual(sVal, tVal) { + changes[key] = map[string]interface{}{ + "old": sVal, + "new": tVal, + } + } + } + return changes +} + +func (s PublishedVersionEntity) GetChanges(t PublishedVersionEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.PreviousVersion != t.PreviousVersion { + changes["PreviousVersion"] = map[string]interface{}{ + "old": s.PreviousVersion, + "new": t.PreviousVersion, + } + } + if s.PreviousVersionPackageId != t.PreviousVersionPackageId { + changes["PreviousVersionPackageId"] = map[string]interface{}{ + "old": s.PreviousVersionPackageId, + "new": t.PreviousVersionPackageId, + } + } + if s.Status != t.Status { + changes["Status"] = map[string]interface{}{ + "old": s.Status, + "new": t.Status, + } + } + if s.PublishedAt != t.PublishedAt { + changes["PublishedAt"] = map[string]interface{}{ + "old": s.PublishedAt, + "new": t.PublishedAt, + } + } + if s.DeletedAt != t.DeletedAt { + changes["DeletedAt"] = map[string]interface{}{ + "old": s.DeletedAt, + "new": t.DeletedAt, + } + } + if s.DeletedBy != t.DeletedBy { + changes["DeletedBy"] = map[string]interface{}{ + "old": s.DeletedBy, + "new": 
t.DeletedBy, + } + } + if metadataChanges := s.Metadata.GetChanges(t.Metadata); len(metadataChanges) > 0 { + changes["Metadata"] = metadataChanges + } + if (len(s.Labels) != 0 || len(t.Labels) != 0) && + !reflect.DeepEqual(s.Labels, t.Labels) { + changes["Labels"] = map[string]interface{}{ + "old": s.Labels, + "new": t.Labels, + } + } + if s.CreatedBy != t.CreatedBy { + changes["CreatedBy"] = map[string]interface{}{ + "old": s.CreatedBy, + "new": t.CreatedBy, + } + } + return changes +} + +func (s PublishedContentEntity) GetChanges(t PublishedContentEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.Checksum != t.Checksum { + changes["Checksum"] = map[string]interface{}{ + "old": s.Checksum, + "new": t.Checksum, + } + } + if s.Index != t.Index { + changes["Index"] = map[string]interface{}{ + "old": s.Index, + "new": t.Index, + } + } + if s.Slug != t.Slug { + changes["Slug"] = map[string]interface{}{ + "old": s.Slug, + "new": t.Slug, + } + } + if s.Name != t.Name { + changes["Name"] = map[string]interface{}{ + "old": s.Name, + "new": t.Name, + } + } + if s.Path != t.Path { + changes["Path"] = map[string]interface{}{ + "old": s.Path, + "new": t.Path, + } + } + if s.DataType != t.DataType { + changes["DataType"] = map[string]interface{}{ + "old": s.DataType, + "new": t.DataType, + } + } + if s.Format != t.Format { + changes["Format"] = map[string]interface{}{ + "old": s.Format, + "new": t.Format, + } + } + if s.Title != t.Title { + changes["Title"] = map[string]interface{}{ + "old": s.Title, + "new": t.Title, + } + } + if metadataChanges := s.Metadata.GetChanges(t.Metadata); len(metadataChanges) > 0 { + changes["Metadata"] = metadataChanges + } + if (len(s.OperationIds) != 0 || len(t.OperationIds) != 0) && + !reflect.DeepEqual(s.OperationIds, t.OperationIds) { + changes["OperationIds"] = map[string]interface{}{ + "old": s.OperationIds, + "new": t.OperationIds, + } + } + if s.ReferenceId != t.ReferenceId { + changes["ReferenceId"] = 
map[string]interface{}{ + "old": s.ReferenceId, + "new": t.ReferenceId, + } + } + if s.Filename != t.Filename { + changes["Filename"] = map[string]interface{}{ + "old": s.Filename, + "new": t.Filename, + } + } + return changes +} + +func (s PublishedReferenceEntity) GetChanges(t PublishedReferenceEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.Excluded != t.Excluded { + changes["Excluded"] = map[string]interface{}{ + "old": s.Excluded, + "new": t.Excluded, + } + } + return changes +} + +func (s PublishedSrcEntity) GetChanges(t PublishedSrcEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.ArchiveChecksum != t.ArchiveChecksum { + changes["ArchiveChecksum"] = map[string]interface{}{ + "old": s.ArchiveChecksum, + "new": t.ArchiveChecksum, + } + } + return changes +} + +func (s OperationEntity) GetChanges(t OperationEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.DataHash != t.DataHash { + changes["DataHash"] = map[string]interface{}{ + "old": s.DataHash, + "new": t.DataHash, + } + } + if s.Kind != t.Kind { + changes["Kind"] = map[string]interface{}{ + "old": s.Kind, + "new": t.Kind, + } + } + if s.Title != t.Title { + changes["Title"] = map[string]interface{}{ + "old": s.Title, + "new": t.Title, + } + } + if metadataChanges := s.Metadata.GetChanges(t.Metadata); len(metadataChanges) > 0 { + changes["Metadata"] = metadataChanges + } + if s.Type != t.Type { + changes["Type"] = map[string]interface{}{ + "old": s.Type, + "new": t.Type, + } + } + // if (len(s.DeprecatedItems) != 0 || len(t.DeprecatedItems) != 0) && + // !reflect.DeepEqual(s.DeprecatedItems, t.DeprecatedItems) { + // changes["DeprecatedItems"] = "DeprecatedItems field has changed" + // } + if (len(s.DeprecatedInfo) != 0 || len(t.DeprecatedInfo) != 0) && + !reflect.DeepEqual(s.DeprecatedInfo, t.DeprecatedInfo) { + changes["DeprecatedInfo"] = "DeprecatedInfo field has changed" + } + if 
(len(s.PreviousReleaseVersions) != 0 || len(t.PreviousReleaseVersions) != 0) && + !reflect.DeepEqual(s.PreviousReleaseVersions, t.PreviousReleaseVersions) { + changes["PreviousReleaseVersions"] = map[string]interface{}{ + "old": s.PreviousReleaseVersions, + "new": t.PreviousReleaseVersions, + } + } + if (len(s.Models) != 0 || len(t.Models) != 0) && + !reflect.DeepEqual(s.Models, t.Models) { + changes["Models"] = map[string]interface{}{ + "old": s.Models, + "new": t.Models, + } + } + if (len(s.CustomTags) != 0 || len(t.CustomTags) != 0) && + !reflect.DeepEqual(s.CustomTags, t.CustomTags) { + changes["CustomTags"] = map[string]interface{}{ + "old": s.CustomTags, + "new": t.CustomTags, + } + } + if s.ApiAudience != t.ApiAudience { + changes["ApiAudience"] = map[string]interface{}{ + "old": s.ApiAudience, + "new": t.ApiAudience, + } + } + return changes +} + +func (s OperationDataEntity) GetChanges(t OperationDataEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + searchScopeChanged := false + if len(s.SearchScope) != 0 || len(t.SearchScope) != 0 { + for sKey, sVal := range s.SearchScope { + if tVal, exists := t.SearchScope[sKey]; exists { + if sValStr, isSValStr := sVal.(string); isSValStr { + if tValStr, isTValStr := tVal.(string); isTValStr { + if sValStr != tValStr { + searchScopeChanged = true + break + } + } else { + searchScopeChanged = true + break + } + } else { + if !reflect.DeepEqual(sVal, tVal) { + searchScopeChanged = true + break + } + } + } else { + searchScopeChanged = true + break + } + } + } + if searchScopeChanged { + // CreateVersionWithData operation_data insert depends on this value (for migration only) + changes["SearchScope"] = "SearchScope field has changed" + } + return changes +} + +func (s VersionComparisonEntity) GetChanges(t VersionComparisonEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if (len(s.Refs) != 0 || len(t.Refs) != 0) && + !reflect.DeepEqual(s.Refs, t.Refs) { + 
changes["Refs"] = map[string]interface{}{ + "old": s.Refs, + "new": t.Refs, + } + } + matchedOperationTypes := make(map[string]struct{}, 0) + for _, sOperationType := range s.OperationTypes { + found := false + for _, tOperationType := range t.OperationTypes { + if sOperationType.ApiType == tOperationType.ApiType { + operationTypeChanges := make(map[string]interface{}, 0) + found = true + matchedOperationTypes[sOperationType.ApiType] = struct{}{} + if sOperationType.ChangesSummary.GetTotalSummary() != tOperationType.ChangesSummary.GetTotalSummary() { + operationTypeChanges["TotalChangesSummary"] = map[string]interface{}{ + "old": sOperationType.ChangesSummary.GetTotalSummary(), + "new": tOperationType.ChangesSummary.GetTotalSummary(), + } + } + if sOperationType.NumberOfImpactedOperations.GetTotalSummary() != tOperationType.NumberOfImpactedOperations.GetTotalSummary() { + operationTypeChanges["TotalNumberOfImpactedOperations"] = map[string]interface{}{ + "old": sOperationType.NumberOfImpactedOperations.GetTotalSummary(), + "new": tOperationType.NumberOfImpactedOperations.GetTotalSummary(), + } + } + if !reflect.DeepEqual(sOperationType.ChangesSummary, tOperationType.ChangesSummary) { + operationTypeChanges["ChangesSummary"] = map[string]interface{}{ + "old": sOperationType.ChangesSummary, + "new": tOperationType.ChangesSummary, + } + } + if !reflect.DeepEqual(sOperationType.NumberOfImpactedOperations, tOperationType.NumberOfImpactedOperations) { + operationTypeChanges["NumberOfImpactedOperations"] = map[string]interface{}{ + "old": sOperationType.NumberOfImpactedOperations, + "new": tOperationType.NumberOfImpactedOperations, + } + } + if (len(sOperationType.Tags) != 0 || len(tOperationType.Tags) != 0) && + !reflect.DeepEqual(sOperationType.Tags, tOperationType.Tags) { + operationTypeChanges["Tags"] = map[string]interface{}{ + "old": sOperationType.Tags, + "new": tOperationType.Tags, + } + } + if (len(sOperationType.ApiAudienceTransitions) != 0 || 
len(tOperationType.ApiAudienceTransitions) != 0) && + !reflect.DeepEqual(sOperationType.ApiAudienceTransitions, tOperationType.ApiAudienceTransitions) { + changes["ApiAudienceTransitions"] = map[string]interface{}{ + "old": sOperationType.ApiAudienceTransitions, + "new": tOperationType.ApiAudienceTransitions, + } + } + if len(operationTypeChanges) > 0 { + changes[sOperationType.ApiType] = operationTypeChanges + } + } + } + if !found { + changes[sOperationType.ApiType] = "comparison operation type not found in build archive" + } + } + for _, tOperationType := range t.OperationTypes { + if _, matched := matchedOperationTypes[tOperationType.ApiType]; !matched { + changes[tOperationType.ApiType] = "unexpected comparison operation type (not found in database)" + } + } + return changes +} + +func (s OperationComparisonEntity) GetChanges(t OperationComparisonEntity) map[string]interface{} { + changes := make(map[string]interface{}, 0) + if s.DataHash != t.DataHash { + changes["DataHash"] = map[string]interface{}{ + "old": s.DataHash, + "new": t.DataHash, + } + } + if s.PreviousDataHash != t.PreviousDataHash { + changes["PreviousDataHash"] = map[string]interface{}{ + "old": s.PreviousDataHash, + "new": t.PreviousDataHash, + } + } + if s.ChangesSummary.GetTotalSummary() != t.ChangesSummary.GetTotalSummary() { + changes["TotalChangesSummary"] = map[string]interface{}{ + "old": s.ChangesSummary.GetTotalSummary(), + "new": t.ChangesSummary.GetTotalSummary(), + } + } + if !reflect.DeepEqual(s.ChangesSummary, t.ChangesSummary) { + changes["ChangesSummary"] = map[string]interface{}{ + "old": s.ChangesSummary, + "new": t.ChangesSummary, + } + } + + if len(s.Changes) != 0 || len(t.Changes) != 0 { + sChanges := s.Changes + var tChanges map[string]interface{} + inrec, _ := json.Marshal(t.Changes) + json.Unmarshal(inrec, &tChanges) + if !reflect.DeepEqual(sChanges, tChanges) { + changes["Changes"] = "Changes field has changed" + } + } + + return changes +} diff --git 
// ---- file: ContentValidation.go (Apache-2.0 license header as in sibling files) ----

package entity

// PublishedContentMessageEntity maps the "published_content_messages"
// table: validation messages for a published content blob, keyed by its
// checksum.
type PublishedContentMessageEntity struct {
	tableName struct{} `pg:"published_content_messages"`

	//todo add packageId to correctly link this table via FK with published_data table
	Checksum string                 `pg:"checksum, pk, type:varchar"`
	Messages []ContentValidationMsg `pg:"messages, type:jsonb"`
	// Slug is not persisted (pg:"-"); populated in memory only.
	Slug string `pg:"-"`
}

// ContentValidationMsg is one validation finding stored inside the
// jsonb messages column.
type ContentValidationMsg struct {
	Type string
	Path []string
	Text string
}

// ---- file: DraftEntities.go (Apache-2.0 license header as in sibling files) ----
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + +type ContentDraftEntity struct { + tableName struct{} `pg:"branch_draft_content"` + + ProjectId string `pg:"project_id, pk, type:varchar"` + BranchName string `pg:"branch_name, pk, type:varchar"` + FileId string `pg:"file_id, pk, type:varchar"` + Index int `pg:"index, use_zero, type:integer"` + Name string `pg:"name, type:varchar"` + Path string `pg:"path, type:varchar"` + Publish bool `pg:"publish, type:boolean, use_zero"` + DataType string `pg:"data_type, type:varchar"` + Data []byte `pg:"data, type:bytea"` + MediaType string `pg:"media_type, type:varchar"` + Status string `pg:"status, type:varchar"` + LastStatus string `pg:"last_status, type:varchar"` + ConflictedBlobId string `pg:"conflicted_blob_id, type:varchar"` + ConflictedFileId string `pg:"conflicted_file_id, type:varchar"` + MovedFrom string `pg:"moved_from, type:varchar"` + BlobId string `pg:"blob_id, type:varchar"` + Labels []string `pg:"labels, array, type:varchar[]"` + Included bool `pg:"included, type:boolean, use_zero"` + FromFolder bool `pg:"from_folder, type:boolean, use_zero"` + IsFolder bool `pg:"is_folder, type:boolean, use_zero"` +} + +type BranchRefDraftEntity struct { + tableName struct{} `pg:"branch_draft_reference"` + + ProjectId string `pg:"project_id, pk, type:varchar"` + BranchName string `pg:"branch_name, pk, type:varchar"` + RefPackageId string `pg:"reference_package_id, pk, type:varchar"` + RefVersion string `pg:"reference_version, pk, type:varchar"` + 
Status string `pg:"status, type:varchar"` +} + +func MakeContentView(content *ContentDraftEntity) *view.Content { + return &view.Content{ + FileId: content.FileId, + Name: content.Name, + Type: view.ParseTypeFromString(content.DataType), + Path: content.Path, + Publish: content.Publish, + Status: view.ParseFileStatus(content.Status), + LastStatus: view.ParseFileStatus(content.LastStatus), + ConflictedBlobId: content.ConflictedBlobId, + ConflictedFileId: content.ConflictedFileId, + MovedFrom: content.MovedFrom, + BlobId: content.BlobId, + Labels: content.Labels, + Included: content.Included, + FromFolder: content.FromFolder, + IsFolder: content.IsFolder, + } +} + +func MakeContentDataView(content *ContentDraftEntity) *view.ContentData { + return &view.ContentData{ + FileId: content.FileId, + Data: content.Data, + DataType: content.MediaType, + BlobId: content.BlobId, + } +} + +func MakeContentEntity(content *view.Content, index int, projectId string, branchName string, data []byte, mediaType string, status string) *ContentDraftEntity { + var resData []byte + if data != nil { + resData = data + } + + return &ContentDraftEntity{ + ProjectId: projectId, + BranchName: branchName, + FileId: content.FileId, + Index: index, + Name: content.Name, + Path: content.Path, + Publish: content.Publish, + DataType: string(content.Type), + Data: resData, + MediaType: mediaType, + Status: status, + LastStatus: string(content.LastStatus), + ConflictedBlobId: content.ConflictedBlobId, + ConflictedFileId: content.ConflictedFileId, + MovedFrom: content.MovedFrom, + BlobId: content.BlobId, + Labels: content.Labels, + Included: content.Included, + FromFolder: content.FromFolder, + IsFolder: content.IsFolder, + } +} + +func MakeRefEntity(ref *view.Ref, projectId string, branchName string, status string) *BranchRefDraftEntity { + return &BranchRefDraftEntity{ + ProjectId: projectId, + BranchName: branchName, + RefPackageId: ref.RefPackageId, + RefVersion: ref.RefPackageVersion, + Status: 
status, + } +} + +func MakeRefView(ref *BranchRefDraftEntity, refPackageName string, versionStatus string, kind string, isBroken bool) *view.Ref { + return &view.Ref{ + RefPackageId: ref.RefPackageId, + RefPackageVersion: ref.RefVersion, + RefPackageName: refPackageName, + VersionStatus: versionStatus, + Kind: kind, + Status: view.ParseFileStatus(ref.Status), + IsBroken: isBroken, + } +} diff --git a/qubership-apihub-service/entity/FavoritesEntities.go b/qubership-apihub-service/entity/FavoritesEntities.go new file mode 100644 index 0000000..2d3d2fa --- /dev/null +++ b/qubership-apihub-service/entity/FavoritesEntities.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +type FavoriteProjectEntity struct { + tableName struct{} `pg:"favorite_projects"` + + UserId string `pg:"user_id, pk, type:varchar"` + Id string `pg:"project_id, pk, type:varchar"` +} + +type FavoritePackageEntity struct { + tableName struct{} `pg:"favorite_packages"` + + UserId string `pg:"user_id, pk, type:varchar"` + Id string `pg:"package_id, pk, type:varchar"` +} diff --git a/qubership-apihub-service/entity/IntegrationEntity.go b/qubership-apihub-service/entity/IntegrationEntity.go new file mode 100644 index 0000000..a846606 --- /dev/null +++ b/qubership-apihub-service/entity/IntegrationEntity.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +type ExternalIdentityEntity struct { + tableName struct{} `pg:"external_identity"` + + Provider string `pg:"provider, pk, type:varchar"` + ExternalId string `pg:"external_id, pk, type:varchar"` + InternalId string `pg:"internal_id, type:varchar"` +} diff --git a/qubership-apihub-service/entity/Metadata.go b/qubership-apihub-service/entity/Metadata.go new file mode 100644 index 0000000..fffc86d --- /dev/null +++ b/qubership-apihub-service/entity/Metadata.go @@ -0,0 +1,333 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "fmt" + "time" +) + +const COMMIT_ID_KEY = "commit_id" +const BLOB_ID_KEY = "blob_id" +const COMMIT_DATE_KEY = "commit_date" +const BRANCH_NAME_KEY = "branch_name" +const LABELS_KEY = "labels" +const REPOSITORY_URL_KEY = "repository_url" +const TITLE_KEY = "title" +const PATH_KEY = "path" +const METHOD_KEY = "method" +const TAGS_KEY = "tags" +const CLOUD_NAME_KEY = "cloud_name" +const CLOUD_URL_KEY = "cloud_url" +const NAMESPACE_KEY = "namespace" +const DESCRIPTION_KEY = "description" +const BUILDER_VERSION_KEY = "builder_version" +const TYPE_KEY = "type" +const INFO = "info" +const EXTERNAL_DOCS = "external_docs" +const VERSION = "version" +const DOC_TAGS_KEY = "tags" + +type Metadata map[string]interface{} + +func (m Metadata) GetStringValue(field string) string { + if fieldValue, ok := m[field].(string); ok { + return fieldValue + } + return "" +} + +func (m Metadata) GetIntValue(field string) int { + //parse as float64 because unmarshal reads json number as float64 + if fieldValue, ok := m[field].(float64); ok { + return int(fieldValue) + } + return 0 +} + +func (m Metadata) GetObject(field string) interface{} { + if field, ok := m[field]; ok { + return field + } + return nil +} + +func (m Metadata) GetStringArray(field string) []string { + if values, ok := m[field].([]interface{}); ok { + var valuesArr []string + for _, l := range values { + if strL, ok := l.(string); ok { + valuesArr = append(valuesArr, strL) + } + } + return valuesArr + } + return make([]string, 0) +} + +func (m Metadata) GetObjectArray(field 
string) ([]interface{}, error) { + if val, ok := m[field]; ok { + if values, ok := val.([]interface{}); ok { + return values, nil + } else { + return nil, fmt.Errorf("incorrect metadata value type, expecting array of objects, value: %+v", val) + } + } + return make([]interface{}, 0), nil +} + +func (m Metadata) GetMapStringToInterface(field string) (map[string]interface{}, error) { + if val, ok := m[field]; ok { + if values, ok := val.(map[string]interface{}); ok { + return values, nil + } else { + return nil, fmt.Errorf("incorrect metadata value type, expecting map string to interface, value: %+v", val) + } + } + return make(map[string]interface{}), nil +} + +func (m Metadata) SetCommitId(commitId string) { + m[COMMIT_ID_KEY] = commitId +} + +func (m Metadata) GetCommitId() string { + if commitId, ok := m[COMMIT_ID_KEY].(string); ok { + return commitId + } + return "" +} + +func (m Metadata) SetBlobId(blobId string) { + m[BLOB_ID_KEY] = blobId +} + +func (m Metadata) GetBlobId() string { + if blobId, ok := m[BLOB_ID_KEY].(string); ok { + return blobId + } + return "" +} + +func (m Metadata) SetCommitDate(commitDate time.Time) { + m[COMMIT_DATE_KEY] = commitDate +} + +func (m Metadata) GetCommitDate() time.Time { + if commitDate, ok := m[COMMIT_DATE_KEY].(time.Time); ok { + return commitDate + } + return time.Time{} +} + +func (m Metadata) SetBranchName(branchName string) { + m[BRANCH_NAME_KEY] = branchName +} + +func (m Metadata) GetBranchName() string { + if branchName, ok := m[BRANCH_NAME_KEY].(string); ok { + return branchName + } + return "" +} + +func (m Metadata) SetLabels(labels []string) { + m[LABELS_KEY] = labels +} + +func (m Metadata) GetLabels() []string { + if labels, ok := m[LABELS_KEY].([]interface{}); ok { + labelsArr := []string{} + for _, l := range labels { + labelsArr = append(labelsArr, l.(string)) + } + return labelsArr + } + return make([]string, 0) +} + +func (m Metadata) SetRepositoryUrl(repositoryUrl string) { + m[REPOSITORY_URL_KEY] = 
repositoryUrl +} + +func (m Metadata) GetRepositoryUrl() string { + if repositoryUrl, ok := m[REPOSITORY_URL_KEY].(string); ok { + return repositoryUrl + } + return "" +} + +func (m Metadata) SetTitle(title string) { + m[TITLE_KEY] = title +} + +func (m Metadata) GetTitle() string { + if title, ok := m[TITLE_KEY].(string); ok { + return title + } + return "" +} + +func (m Metadata) SetDescription(description string) { + m[DESCRIPTION_KEY] = description +} + +func (m Metadata) GetDescription() string { + if description, ok := m[DESCRIPTION_KEY].(string); ok { + return description + } + return "" +} + +func (m Metadata) SetPath(path string) { + m[PATH_KEY] = path +} + +func (m Metadata) GetPath() string { + if path, ok := m[PATH_KEY].(string); ok { + return path + } + return "" +} + +func (m Metadata) SetMethod(method string) { + m[METHOD_KEY] = method +} + +func (m Metadata) GetMethod() string { + if method, ok := m[METHOD_KEY].(string); ok { + return method + } + return "" +} + +func (m Metadata) SetTags(tags []string) { + m[TAGS_KEY] = tags +} + +func (m Metadata) GetTags() []string { + if tags, ok := m[TAGS_KEY].([]interface{}); ok { + tagsArr := []string{} + for _, l := range tags { + tagsArr = append(tagsArr, l.(string)) + } + return tagsArr + } + return make([]string, 0) +} + +func (m Metadata) SetCloudName(cloudName string) { + m[CLOUD_NAME_KEY] = cloudName +} + +func (m Metadata) GetCloudName() string { + if cloudName, ok := m[CLOUD_NAME_KEY].(string); ok { + return cloudName + } + return "" +} +func (m Metadata) SetCloudUrl(cloudUrl string) { + m[CLOUD_URL_KEY] = cloudUrl +} + +func (m Metadata) GetCloudUrl() string { + if cloudUrl, ok := m[CLOUD_URL_KEY].(string); ok { + return cloudUrl + } + return "" +} + +func (m Metadata) SetNamespace(namespace string) { + m[NAMESPACE_KEY] = namespace +} + +func (m Metadata) GetNamespace() string { + if namespace, ok := m[NAMESPACE_KEY].(string); ok { + return namespace + } + return "" +} + +func (m Metadata) 
SetBuilderVersion(builderVersion string) { + m[BUILDER_VERSION_KEY] = builderVersion +} + +func (m Metadata) GetBuilderVersion() string { + if builderVersion, ok := m[BUILDER_VERSION_KEY].(string); ok { + return builderVersion + } + return "" +} + +func (m Metadata) SetType(typeValue string) { + m[TYPE_KEY] = typeValue +} + +func (m Metadata) GetType() string { + if typeValue, ok := m[TYPE_KEY].(string); ok { + return typeValue + } + return "" +} + +func (m Metadata) SetInfo(info interface{}) { + m[INFO] = info +} + +func (m Metadata) GetInfo() interface{} { + if info, ok := m[INFO]; ok { + return info + } + return nil +} + +func (m Metadata) SetExternalDocs(externalDocs interface{}) { + m[EXTERNAL_DOCS] = externalDocs +} + +func (m Metadata) GetExternalDocs() interface{} { + if externalDocs, ok := m[EXTERNAL_DOCS]; ok { + return externalDocs + } + return nil +} + +func (m Metadata) SetVersion(version string) { + m[VERSION] = version +} + +func (m Metadata) GetVersion() string { + if version, ok := m[VERSION].(string); ok { + return version + } + return "" +} + +func (m Metadata) SetDocTags(tags []interface{}) { + m[DOC_TAGS_KEY] = tags +} + +func (m Metadata) GetDocTags() []interface{} { + if tags, ok := m[DOC_TAGS_KEY].([]interface{}); ok { + return tags + } + return nil +} + +func (m Metadata) MergeMetadata(other Metadata) { + for k, v := range other { + m[k] = v + } +} diff --git a/qubership-apihub-service/entity/MetricsEntity.go b/qubership-apihub-service/entity/MetricsEntity.go new file mode 100644 index 0000000..5f01995 --- /dev/null +++ b/qubership-apihub-service/entity/MetricsEntity.go @@ -0,0 +1,31 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +type BuildByStatusCountEntity struct { + tableName struct{} `pg:"build"` + BuildCount int `pg:"build_count, type:integer"` +} + +type BuildTimeMetricsEntity struct { + tableName struct{} `pg:"build"` + MaxBuildTime int `pg:"max_build_time, type:integer"` + AvgBuildTime int `pg:"avg_build_time, type:integer"` +} + +type BuildRetriesCountEntity struct { + tableName struct{} `pg:"build"` + RetriesCount int `pg:"retries_count, type:integer"` +} diff --git a/qubership-apihub-service/entity/MigrationEntity.go b/qubership-apihub-service/entity/MigrationEntity.go new file mode 100644 index 0000000..96408dd --- /dev/null +++ b/qubership-apihub-service/entity/MigrationEntity.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +type MigrationEntity struct { + tableName struct{} `pg:"schema_migrations"` + + Version int `pg:"version, pk, type:bigint"` + Dirty bool `pg:"dirty, type:boolean, use_zero"` +} diff --git a/qubership-apihub-service/entity/OperationEntity.go b/qubership-apihub-service/entity/OperationEntity.go new file mode 100644 index 0000000..47a4e1f --- /dev/null +++ b/qubership-apihub-service/entity/OperationEntity.go @@ -0,0 +1,621 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +import ( + "encoding/json" + "time" + + "github.com/iancoleman/orderedmap" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type OperationEntity struct { + tableName struct{} `pg:"operation"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + OperationId string `pg:"operation_id, pk, type:varchar"` + DataHash string `pg:"data_hash, type:varchar"` + Deprecated bool `pg:"deprecated, type:boolean, use_zero"` + Kind string `pg:"kind, type:varchar"` + Title string `pg:"title, type:varchar, use_zero"` + Metadata Metadata `pg:"metadata, type:jsonb"` + Type string `pg:"type, type:varchar, use_zero"` + DeprecatedInfo string `pg:"deprecated_info, type:varchar, use_zero"` + DeprecatedItems []view.DeprecatedItem `pg:"deprecated_items, type:jsonb, use_zero"` + PreviousReleaseVersions []string `pg:"previous_release_versions, type:varchar[], use_zero"` + Models map[string]string `pg:"models, type:jsonb, use_zero"` + CustomTags map[string]interface{} `pg:"custom_tags, type:jsonb, use_zero"` + ApiAudience string `pg:"api_audience, type:varchar, use_zero"` +} + +type OperationsTypeCountEntity struct { + tableName struct{} `pg:"operation"` + + ApiType string `pg:"type, type:varchar"` + OperationsCount int `pg:"operations_count, type:integer"` + DeprecatedCount int `pg:"deprecated_count, type:integer"` + NoBwcOperationsCount int `pg:"no_bwc_count, type:integer"` + InternalAudienceOperationsCount int `pg:"internal_count, type:integer"` + UnknownAudienceOperationsCount int `pg:"unknown_count, type:integer"` +} + +type DeprecatedOperationsSummaryEntity struct { + tableName struct{} `pg:"operation"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + ApiType string `pg:"type, type:varchar"` + Tags 
[]string `pg:"tags, type:varchar[]"` + DeprecatedCount int `pg:"deprecated_count, type:integer"` +} + +type OperationsTypeDataHashEntity struct { + tableName struct{} `pg:"operation"` + + ApiType string `pg:"type, type:varchar"` + OperationsHash map[string]string `pg:"operations_hash, type:json"` +} + +type OperationDataEntity struct { + tableName struct{} `pg:"operation_data, alias:operation_data"` + + DataHash string `pg:"data_hash, pk, type:varchar"` + Data []byte `pg:"data, type:bytea"` + SearchScope map[string]interface{} `pg:"search_scope, type:jsonb"` +} + +type OperationComparisonEntity struct { + tableName struct{} `pg:"operation_comparison"` + + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + Revision int `pg:"revision, type:integer, use_zero"` + PreviousPackageId string `pg:"previous_package_id, type:varchar, use_zero"` + PreviousVersion string `pg:"previous_version, type:varchar, use_zero"` + PreviousRevision int `pg:"previous_revision, type:integer, use_zero"` + ComparisonId string `pg:"comparison_id, type:varchar"` + OperationId string `pg:"operation_id, type:varchar"` + DataHash string `pg:"data_hash, type:varchar"` + PreviousDataHash string `pg:"previous_data_hash, type:varchar"` + ChangesSummary view.ChangeSummary `pg:"changes_summary, type:jsonb"` + Changes map[string]interface{} `pg:"changes, type:jsonb"` +} + +type VersionComparisonEntity struct { + tableName struct{} `pg:"version_comparison, alias:version_comparison"` + + PackageId string `pg:"package_id, pk, type:varchar, use_zero"` + Version string `pg:"version, pk, type:varchar, use_zero"` + Revision int `pg:"revision, pk, type:integer, use_zero"` + PreviousPackageId string `pg:"previous_package_id, pk, type:varchar, use_zero"` + PreviousVersion string `pg:"previous_version, pk, type:varchar, use_zero"` + PreviousRevision int `pg:"previous_revision, pk, type:integer, use_zero"` + ComparisonId string `pg:"comparison_id, 
type:varchar"` + OperationTypes []view.OperationType `pg:"operation_types, type:jsonb"` + Refs []string `pg:"refs, type:varchar[]"` + OpenCount int `pg:"open_count, type:integer, use_zero"` + LastActive time.Time `pg:"last_active, type:timestamp without time zone, use_zero"` + NoContent bool `pg:"no_content, type:boolean, use_zero"` + BuilderVersion string `pg:"builder_version, type:varchar"` +} + +func MakeRefComparisonView(entity VersionComparisonEntity) *view.RefComparison { + refComparisonView := &view.RefComparison{ + OperationTypes: entity.OperationTypes, + NoContent: entity.NoContent, + PackageRef: view.MakePackageRefKey(entity.PackageId, entity.Version, entity.Revision), + PreviousPackageRef: view.MakePackageRefKey(entity.PreviousPackageId, entity.PreviousVersion, entity.PreviousRevision), + } + return refComparisonView +} + +type OperationRichEntity struct { + tableName struct{} `pg:"operation, alias:operation"` + OperationEntity + Data []byte `pg:"data, type:bytea"` +} + +type OperationComparisonChangelogEntity_deprecated struct { + tableName struct{} `pg:"_, alias:operation_comparison, discard_unknown_columns"` + OperationComparisonEntity + ApiType string `pg:"type, type:varchar"` + ApiKind string `pg:"kind, type:varchar"` + Title string `pg:"title, type:varchar"` + Metadata Metadata `pg:"metadata, type:jsonb"` + PackageRef string `pg:"package_ref, type:varchar"` + PreviousPackageRef string `pg:"previous_package_ref, type:varchar"` +} + +type OperationComparisonChangelogEntity struct { + tableName struct{} `pg:"_, alias:operation_comparison, discard_unknown_columns"` + OperationComparisonEntity + ApiType string `pg:"type, type:varchar"` + ApiKind string `pg:"kind, type:varchar"` + PreviousApiKind string `pg:"previous_kind, type:varchar"` + ApiAudience string `pg:"api_audience, type:varchar"` + PreviousApiAudience string `pg:"previous_api_audience, type:varchar"` + Title string `pg:"title, type:varchar"` + PreviousTitle string `pg:"previous_title, 
type:varchar"` + Metadata Metadata `pg:"metadata, type:jsonb"` + PackageRef string `pg:"package_ref, type:varchar"` + PreviousPackageRef string `pg:"previous_package_ref, type:varchar"` +} + +type ChangelogSearchQueryEntity struct { + ComparisonId string `pg:"comparison_id, type:varchar, use_zero"` + ApiType string `pg:"type, type:varchar, use_zero"` + TextFilter string `pg:"text_filter, type:varchar, use_zero"` + ApiKind string `pg:"api_kind, type:varchar, use_zero"` + ApiAudience string `pg:"api_audience, type:varchar, use_zero"` + DocumentSlug string `pg:"document_slug, type:varchar, use_zero"` + Tags []string `pg:"tags, type:varchar[], use_zero"` + EmptyTag bool `pg:"empty_tag, type:boolean, use_zero"` + RefPackageId string `pg:"ref_package_id, type:varchar, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` + EmptyGroup bool `pg:"-"` + Group string `pg:"-"` + GroupPackageId string `pg:"-"` + GroupVersion string `pg:"-"` + GroupRevision int `pg:"-"` + Severities []string `pg:"-"` +} + +type OperationTagsSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + Revision int `pg:"revision, type:integer, use_zero"` + Type string `pg:"type, type:varchar, use_zero"` + TextFilter string `pg:"text_filter, type:varchar, use_zero"` + Kind string `pg:"kind, type:varchar, use_zero"` + ApiAudience string `pg:"api_audience, type:varchar, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` +} +type OperationSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + Revision int `pg:"revision, type:integer, use_zero"` + ApiType string `pg:"type, type:varchar, use_zero"` + OperationId string `pg:"operation_id, type:varchar, use_zero"` +} + +type OperationModelsEntity struct { + tableName 
struct{} `pg:"operation, alias:operation"` + + OperationId string `pg:"operation_id, type:varchar"` + Models []string `pg:"models, type:varchar[]"` +} + +// deprecated +func MakeDocumentsOperationView_deprecated(operationEnt OperationEntity) view.DocumentsOperation_deprecated { + documentsOperation := view.DocumentsOperation_deprecated{ + OperationId: operationEnt.OperationId, + Title: operationEnt.Title, + DataHash: operationEnt.DataHash, + Deprecated: operationEnt.Deprecated, + ApiKind: operationEnt.Kind, + ApiType: operationEnt.Type, + } + switch operationEnt.Type { + case string(view.RestApiType): + restOperationMetadata := view.RestOperationMetadata{ + Path: operationEnt.Metadata.GetPath(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + } + documentsOperation.Metadata = restOperationMetadata + case string(view.GraphqlApiType): + graphQLOperationMetadata := view.GraphQLOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + } + documentsOperation.Metadata = graphQLOperationMetadata + case string(view.ProtobufApiType): + protobufOperationMetadata := view.ProtobufOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + } + documentsOperation.Metadata = protobufOperationMetadata + } + return documentsOperation +} + +func MakeDocumentsOperationView(operationEnt OperationEntity) interface{} { + switch operationEnt.Type { + case string(view.RestApiType): + return MakeRestOperationView(&operationEnt) + case string(view.GraphqlApiType): + return MakeGraphQLOperationView(&operationEnt) + case string(view.ProtobufApiType): + return MakeProtobufOperationView(&operationEnt) + } + return MakeCommonOperationView(&operationEnt) +} + +func MakeOperationView(operationEnt OperationRichEntity) interface{} { + data := orderedmap.New() + if len(operationEnt.Data) > 0 { + err := 
json.Unmarshal(operationEnt.Data, &data) + if err != nil { + log.Errorf("Failed to unmarshal data (dataHash: %v): %v", operationEnt.DataHash, err) + } + } + switch operationEnt.Type { + case string(view.RestApiType): + restOperationView := MakeRestOperationView(&operationEnt.OperationEntity) + restOperationView.Data = data + restOperationView.PackageRef = view.MakePackageRefKey(operationEnt.PackageId, operationEnt.Version, operationEnt.Revision) + return restOperationView + + case string(view.GraphqlApiType): + graphqlOperationView := MakeGraphQLOperationView(&operationEnt.OperationEntity) + graphqlOperationView.Data = data + graphqlOperationView.PackageRef = view.MakePackageRefKey(operationEnt.PackageId, operationEnt.Version, operationEnt.Revision) + return graphqlOperationView + + case string(view.ProtobufApiType): + protobufOperationView := MakeProtobufOperationView(&operationEnt.OperationEntity) + protobufOperationView.Data = data + protobufOperationView.PackageRef = view.MakePackageRefKey(operationEnt.PackageId, operationEnt.Version, operationEnt.Revision) + return protobufOperationView + } + return MakeCommonOperationView(&operationEnt.OperationEntity) +} + +func MakeOperationIdsSlice(operationEnt []OperationRichEntity) []string { + operationIds := make([]string, 0, len(operationEnt)) + for _, entity := range operationEnt { + operationIds = append(operationIds, entity.OperationId) + } + return operationIds +} + +func MakeCommonOperationView(operationEnt *OperationEntity) view.OperationListView { + return view.OperationListView{ + CommonOperationView: view.CommonOperationView{ + OperationId: operationEnt.OperationId, + Title: operationEnt.Title, + DataHash: operationEnt.DataHash, + Deprecated: operationEnt.Deprecated, + ApiKind: operationEnt.Kind, + ApiType: operationEnt.Type, + CustomTags: operationEnt.CustomTags, + ApiAudience: operationEnt.ApiAudience, + }, + } +} + +func MakeRestOperationView(operationEnt *OperationEntity) view.RestOperationView { + return 
view.RestOperationView{ + OperationListView: MakeCommonOperationView(operationEnt), + RestOperationMetadata: view.RestOperationMetadata{ + Path: operationEnt.Metadata.GetPath(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } +} + +func MakeGraphQLOperationView(operationEnt *OperationEntity) view.GraphQLOperationView { + return view.GraphQLOperationView{ + OperationListView: MakeCommonOperationView(operationEnt), + GraphQLOperationMetadata: view.GraphQLOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } +} + +func MakeProtobufOperationView(operationEnt *OperationEntity) view.ProtobufOperationView { + return view.ProtobufOperationView{ + OperationListView: MakeCommonOperationView(operationEnt), + ProtobufOperationMetadata: view.ProtobufOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + }, + } +} + +func MakeDeprecatedOperationView(operationEnt OperationRichEntity, includeDeprecatedItems bool) interface{} { + operationView := view.DeprecatedOperationView{ + OperationId: operationEnt.OperationId, + Title: operationEnt.Title, + DataHash: operationEnt.DataHash, + Deprecated: operationEnt.Deprecated, + ApiKind: operationEnt.Kind, + ApiType: operationEnt.Type, + PackageRef: view.MakePackageRefKey(operationEnt.PackageId, operationEnt.Version, operationEnt.Revision), + DeprecatedInfo: operationEnt.DeprecatedInfo, + DeprecatedCount: len(operationEnt.DeprecatedItems), + PreviousReleaseVersions: operationEnt.PreviousReleaseVersions, + ApiAudience: operationEnt.ApiAudience, + } + if includeDeprecatedItems { + operationView.DeprecatedItems = operationEnt.DeprecatedItems + } + + switch operationEnt.Type { + case string(view.RestApiType): + return view.DeprecatedRestOperationView{ + DeprecatedOperationView: operationView, + RestOperationMetadata: view.RestOperationMetadata{ + 
Path: operationEnt.Metadata.GetPath(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } + case string(view.GraphqlApiType): + return view.DeprecateGraphQLOperationView{ + DeprecatedOperationView: operationView, + GraphQLOperationMetadata: view.GraphQLOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } + case string(view.ProtobufApiType): + return view.DeprecateProtobufOperationView{ + DeprecatedOperationView: operationView, + ProtobufOperationMetadata: view.ProtobufOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + }, + } + } + return operationView +} + +func MakeSingleOperationView(operationEnt OperationRichEntity) interface{} { + data := orderedmap.New() + if len(operationEnt.Data) > 0 { + err := json.Unmarshal(operationEnt.Data, &data) + if err != nil { + log.Errorf("Failed to unmarshal data (dataHash: %v): %v", operationEnt.DataHash, err) + } + } + operationView := view.SingleOperationView{ + OperationId: operationEnt.OperationId, + Title: operationEnt.Title, + DataHash: operationEnt.DataHash, + Deprecated: operationEnt.Deprecated, + ApiKind: operationEnt.Kind, + ApiType: operationEnt.Type, + Data: data, + CustomTags: operationEnt.CustomTags, + ApiAudience: operationEnt.ApiAudience, + } + + switch operationEnt.Type { + case string(view.RestApiType): + return view.RestOperationSingleView{ + SingleOperationView: operationView, + RestOperationMetadata: view.RestOperationMetadata{ + Path: operationEnt.Metadata.GetPath(), + Method: operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } + case string(view.GraphqlApiType): + return view.GraphQLOperationSingleView{ + SingleOperationView: operationView, + GraphQLOperationMetadata: view.GraphQLOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: 
operationEnt.Metadata.GetMethod(), + Tags: operationEnt.Metadata.GetTags(), + }, + } + case string(view.ProtobufApiType): + return view.ProtobufOperationSingleView{ + SingleOperationView: operationView, + ProtobufOperationMetadata: view.ProtobufOperationMetadata{ + Type: operationEnt.Metadata.GetType(), + Method: operationEnt.Metadata.GetMethod(), + }, + } + } + return operationView +} + +func MakeSingleOperationDeprecatedItemsView(operationEnt OperationRichEntity) view.DeprecatedItems { + return view.DeprecatedItems{ + DeprecatedItems: operationEnt.DeprecatedItems, + } +} + +func MakeOperationChangesListView(changedOperationEnt OperationComparisonEntity) []interface{} { + result := make([]interface{}, 0) + if changes, ok := changedOperationEnt.Changes["changes"].([]interface{}); ok { + for _, change := range changes { + result = append(result, view.ParseSingleOperationChange(change)) + } + } + return result +} + +func MakeOperationComparisonChangelogView_deprecated(entity OperationComparisonChangelogEntity_deprecated) interface{} { + operationComparisonChangelogView := view.OperationComparisonChangelogView_deprecated{ + OperationId: entity.OperationId, + Title: entity.Title, + ChangeSummary: entity.ChangesSummary, + ApiKind: entity.ApiKind, + DataHash: entity.DataHash, + PreviousDataHash: entity.PreviousDataHash, + PackageRef: view.MakePackageRefKey(entity.PackageId, entity.Version, entity.Revision), + PreviousVersionPackageRef: view.MakePackageRefKey(entity.PreviousPackageId, entity.PreviousVersion, entity.PreviousRevision), + } + switch entity.ApiType { + case string(view.RestApiType): + return view.RestOperationComparisonChangelogView_deprecated{ + OperationComparisonChangelogView_deprecated: operationComparisonChangelogView, + RestOperationChange: view.RestOperationChange{ + Path: entity.Metadata.GetPath(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + case string(view.GraphqlApiType): + return 
view.GraphQLOperationComparisonChangelogView_deprecated{ + OperationComparisonChangelogView_deprecated: operationComparisonChangelogView, + GraphQLOperationMetadata: view.GraphQLOperationMetadata{ + Type: entity.Metadata.GetType(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + } + return operationComparisonChangelogView +} + +func MakeOperationComparisonChangelogView(entity OperationComparisonChangelogEntity) interface{} { + var currentOperation *view.ComparisonOperationView + var previousOperation *view.ComparisonOperationView + + if entity.DataHash != "" { + currentOperation = &view.ComparisonOperationView{ + Title: entity.Title, + ApiKind: entity.ApiKind, + ApiAudience: entity.ApiAudience, + DataHash: entity.DataHash, + PackageRef: view.MakePackageRefKey(entity.PackageId, entity.Version, entity.Revision), + } + } + if entity.PreviousDataHash != "" { + previousOperation = &view.ComparisonOperationView{ + Title: entity.PreviousTitle, + ApiKind: entity.PreviousApiKind, + ApiAudience: entity.PreviousApiAudience, + DataHash: entity.PreviousDataHash, + PackageRef: view.MakePackageRefKey(entity.PreviousPackageId, entity.PreviousVersion, entity.PreviousRevision), + } + } + + operationComparisonChangelogView := view.OperationComparisonChangelogView{ + OperationId: entity.OperationId, + CurrentOperation: currentOperation, + PreviousOperation: previousOperation, + ChangeSummary: entity.ChangesSummary, + } + + switch entity.ApiType { + case string(view.RestApiType): + return view.RestOperationComparisonChangelogView{ + OperationComparisonChangelogView: operationComparisonChangelogView, + RestOperationChange: view.RestOperationChange{ + Path: entity.Metadata.GetPath(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + case string(view.GraphqlApiType): + return view.GraphQLOperationComparisonChangelogView{ + OperationComparisonChangelogView: operationComparisonChangelogView, + GraphQLOperationMetadata: 
view.GraphQLOperationMetadata{ + Type: entity.Metadata.GetType(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + case string(view.ProtobufApiType): + return view.ProtobufOperationComparisonChangelogView{ + OperationComparisonChangelogView: operationComparisonChangelogView, + ProtobufOperationMetadata: view.ProtobufOperationMetadata{ + Type: entity.Metadata.GetType(), + Method: entity.Metadata.GetMethod(), + }, + } + } + return operationComparisonChangelogView +} + +// todo use current (not deprecated entity) +func MakeOperationComparisonChangesView(entity OperationComparisonChangelogEntity_deprecated) interface{} { + var action string + if entity.DataHash == "" { + action = view.ChangelogActionRemove + } else if entity.PreviousDataHash == "" { + action = view.ChangelogActionAdd + } else { + action = view.ChangelogActionChange + } + operationComparisonChangelogView := view.OperationComparisonChangesView{ + OperationId: entity.OperationId, + Title: entity.Title, + ChangeSummary: entity.ChangesSummary, + ApiKind: entity.ApiKind, + DataHash: entity.DataHash, + PreviousDataHash: entity.PreviousDataHash, + PackageRef: view.MakePackageRefKey(entity.PackageId, entity.Version, entity.Revision), + PreviousVersionPackageRef: view.MakePackageRefKey(entity.PreviousPackageId, entity.PreviousVersion, entity.PreviousRevision), + Changes: MakeOperationChangesListView(entity.OperationComparisonEntity), + Action: action, + } + switch entity.ApiType { + case string(view.RestApiType): + return view.RestOperationComparisonChangesView{ + OperationComparisonChangesView: operationComparisonChangelogView, + RestOperationChange: view.RestOperationChange{ + Path: entity.Metadata.GetPath(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + case string(view.GraphqlApiType): + return view.GraphQLOperationComparisonChangesView{ + OperationComparisonChangesView: operationComparisonChangelogView, + GraphQLOperationMetadata: 
view.GraphQLOperationMetadata{ + Type: entity.Metadata.GetType(), + Method: entity.Metadata.GetMethod(), + Tags: entity.Metadata.GetTags(), + }, + } + case string(view.ProtobufApiType): + return view.ProtobufOperationComparisonChangesView{ + OperationComparisonChangesView: operationComparisonChangelogView, + ProtobufOperationMetadata: view.ProtobufOperationMetadata{ + Type: entity.Metadata.GetType(), + Method: entity.Metadata.GetMethod(), + }, + } + } + return operationComparisonChangelogView +} + +func MakeDeprecatedOperationType(ent DeprecatedOperationsSummaryEntity) view.DeprecatedOperationType { + return view.DeprecatedOperationType{ + ApiType: ent.ApiType, + Tags: ent.Tags, + DeprecatedCount: ent.DeprecatedCount, + } +} + +func MakeDeprecatedOperationTypesRef(packageRef string, deprecatedOperationTypes []DeprecatedOperationsSummaryEntity) view.DeprecatedOperationTypesRef { + deprecatedOperationTypesRef := view.DeprecatedOperationTypesRef{ + PackageRef: packageRef, + OperationTypes: make([]view.DeprecatedOperationType, 0), + } + for _, operationType := range deprecatedOperationTypes { + deprecatedOperationTypesRef.OperationTypes = append(deprecatedOperationTypesRef.OperationTypes, MakeDeprecatedOperationType(operationType)) + } + return deprecatedOperationTypesRef +} diff --git a/qubership-apihub-service/entity/OperationsGroupEntity.go b/qubership-apihub-service/entity/OperationsGroupEntity.go new file mode 100644 index 0000000..14ec698 --- /dev/null +++ b/qubership-apihub-service/entity/OperationsGroupEntity.go @@ -0,0 +1,126 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type OperationGroupEntity struct { + tableName struct{} `pg:"operation_group"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + ApiType string `pg:"api_type, pk, type:varchar"` + GroupName string `pg:"group_name, pk, type:varchar"` + GroupId string `pg:"group_id, type:varchar"` + Description string `pg:"description, type:varchar"` + Autogenerated bool `pg:"autogenerated, type:boolean, use_zero"` + TemplateChecksum string `pg:"template_checksum, type:varchar"` + TemplateFilename string `pg:"template_filename, type:varchar"` +} + +type OperationGroupTemplateEntity struct { + tableName struct{} `pg:"operation_group_template"` + + Checksum string `pg:"checksum, pk, type:varchar"` + Template []byte `pg:"template, type:bytea"` +} + +type OperationGroupTemplateFileEntity struct { + tableName struct{} `pg:"operation_group_template, alias:operation_group_template"` + + Template []byte `pg:"template, type:bytea"` + TemplateFilename string `pg:"template_filename, type:string"` +} + +type OperationGroupHistoryEntity struct { + tableName struct{} `pg:"operation_group_history"` + + GroupId string `pg:"group_id, type:varchar"` + Action string `pg:"action, type:varchar"` + Data OperationGroupEntity `pg:"data, type:jsonb"` + UserId string `pg:"user_id, 
type:varchar"` + Date time.Time `pg:"date, type:timestamp without time zone"` + Automatic bool `pg:"automatic, type:boolean, use_zero"` +} + +type GroupedOperationEntity struct { + tableName struct{} `pg:"grouped_operation"` + + GroupId string `pg:"group_id, type:varchar"` + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + OperationId string `pg:"operation_id, type:varchar"` +} + +type OperationGroupCountEntity struct { + tableName struct{} `pg:"operation_group, alias:operation_group"` + + OperationGroupEntity + OperationsCount int `pg:"operations_count, type:integer"` + ExportTemplateFilename string `pg:"export_template_filename, type:varchar"` +} + +type OperationGroupPublishEntity struct { + tableName struct{} `pg:"operation_group_publication, alias:operation_group_publication"` + + PublishId string `pg:"publish_id, pk, type:varchar"` + Status string `pg:"status, type:varchar"` + Details string `pg:"details, type:varchar, use_zero"` +} + +func MakeOperationGroupView(ent OperationGroupCountEntity) view.OperationGroup { + return view.OperationGroup{ + GroupName: ent.GroupName, + Description: ent.Description, + IsPrefixGroup: ent.Autogenerated, + OperationsCount: ent.OperationsCount, + } +} + +func MakeVersionOperationGroupView(ent OperationGroupCountEntity) view.VersionOperationGroup { + return view.VersionOperationGroup{ + GroupName: ent.GroupName, + ApiType: ent.ApiType, + Description: ent.Description, + IsPrefixGroup: ent.Autogenerated, + OperationsCount: ent.OperationsCount, + ExportTemplateFilename: ent.ExportTemplateFilename, + } +} + +func MakeOperationGroupHistoryEntity(ent OperationGroupEntity, action string, userId string) *OperationGroupHistoryEntity { + return &OperationGroupHistoryEntity{ + GroupId: ent.GroupId, + Action: action, + Data: ent, + UserId: userId, + Date: time.Now(), + Automatic: false, + } +} + +func 
MakeOperationGroupTemplateEntity(templateData []byte) *OperationGroupTemplateEntity { + return &OperationGroupTemplateEntity{ + Checksum: utils.GetEncodedChecksum(templateData), + Template: templateData, + } +} diff --git a/qubership-apihub-service/entity/PackageMemberRoleEntity.go b/qubership-apihub-service/entity/PackageMemberRoleEntity.go new file mode 100644 index 0000000..b9afc59 --- /dev/null +++ b/qubership-apihub-service/entity/PackageMemberRoleEntity.go @@ -0,0 +1,100 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +import ( + "sort" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type RoleEntity struct { + tableName struct{} `pg:"role"` + + Id string `pg:"id, pk, type:varchar"` + Role string `pg:"role, type:varchar"` + Permissions []string `pg:"permissions, type:varchar array, array, use_zero"` + Rank int `pg:"rank, type:varchar"` + ReadOnly bool `pg:"read_only, use_zero, type:boolean"` +} + +type PackageMemberRoleEntity struct { + tableName struct{} `pg:"package_member_role, alias:package_member_role"` + + PackageId string `pg:"package_id, pk, type:varchar"` + UserId string `pg:"user_id, pk, type:varchar"` + Roles []string `pg:"roles, type:varchar array, array"` + CreatedAt time.Time `pg:"created_at, type:timestamp without time zone"` + CreatedBy string `pg:"created_by, type:varchar"` + UpdatedAt *time.Time `pg:"updated_at, type:timestamp without time zone"` + UpdatedBy string `pg:"updated_by, type:varchar"` +} + +type PackageMemberRoleRichEntity struct { + PackageId string `pg:"package_id, type:varchar"` + PackageKind string `pg:"package_kind, type:varchar"` + PackageName string `pg:"package_name, type:varchar"` + UserId string `pg:"user_id, type:varchar"` + UserName string `pg:"user_name, type:varchar"` + UserEmail string `pg:"user_email, type:varchar"` + UserAvatar string `pg:"user_avatar, type:varchar"` + RoleId string `pg:"role_id, type:varchar"` + Role string `pg:"role, type:varchar"` +} + +func MakePackageMemberView(packageId string, memberRoles []PackageMemberRoleRichEntity) view.PackageMember { + memberView := view.PackageMember{} + roles := make([]view.PackageMemberRoleView, 0) + for _, role := range memberRoles { + if memberView.User.Id == "" { + memberView.User = view.User{ + Id: role.UserId, + Email: role.UserEmail, + Name: role.UserName, + AvatarUrl: role.UserAvatar, + } + } + roleView := view.PackageMemberRoleView{ + RoleId: role.RoleId, + RoleName: role.Role, + } + if packageId == 
role.PackageId { + roleView.Inheritance = nil + } else { + roleView.Inheritance = &view.ShortPackage{ + PackageId: role.PackageId, + Kind: role.PackageKind, + Name: role.PackageName, + } + } + roles = append(roles, roleView) + sort.Slice(roles, func(i, j int) bool { + return roles[i].RoleId < roles[j].RoleId + }) + } + memberView.Roles = roles + return memberView +} + +func MakeRoleView(ent RoleEntity) view.PackageRole { + return view.PackageRole{ + RoleId: ent.Id, + RoleName: ent.Role, + ReadOnly: ent.ReadOnly, + Permissions: ent.Permissions, + Rank: ent.Rank, + } +} diff --git a/qubership-apihub-service/entity/PrincipalEntity.go b/qubership-apihub-service/entity/PrincipalEntity.go new file mode 100644 index 0000000..0bf18b1 --- /dev/null +++ b/qubership-apihub-service/entity/PrincipalEntity.go @@ -0,0 +1,62 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +import ( + "encoding/json" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +// By design either User fields or ApiKey fields are filled +type PrincipalEntity struct { + PrincipalUserId string `pg:"prl_usr_id, type:varchar"` + PrincipalUserName string `pg:"prl_usr_name, type:varchar"` + PrincipalUserEmail string `pg:"prl_usr_email, type:varchar"` + PrincipalUserAvatarUrl string `pg:"prl_usr_avatar_url, type:varchar"` + PrincipalApiKeyId string `pg:"prl_apikey_id, type:varchar"` + PrincipalApiKeyName string `pg:"prl_apikey_name, type:varchar"` +} + +func MakePrincipalView(ent *PrincipalEntity) *map[string]interface{} { + principal := make(map[string]interface{}) + var principalViewBytes []byte + if ent.PrincipalUserId != "" { + userPrincipalView := view.PrincipalUserView{ + PrincipalType: view.PTUser, + User: view.User{ + Id: ent.PrincipalUserId, + Name: ent.PrincipalUserName, + Email: ent.PrincipalUserEmail, + AvatarUrl: ent.PrincipalUserAvatarUrl, + }, + } + principalViewBytes, _ = json.Marshal(userPrincipalView) + } else if ent.PrincipalApiKeyId != "" { + apiKeyPrincipalView := view.PrincipalApiKeyView{ + PrincipalType: view.PTApiKey, + ApiKey: view.ApiKey{ + Id: ent.PrincipalApiKeyId, + Name: ent.PrincipalApiKeyName, + }, + } + principalViewBytes, _ = json.Marshal(apiKeyPrincipalView) + } + err := json.Unmarshal(principalViewBytes, &principal) + if err != nil { + log.Errorf("Failed to unmarshal Principal object data: %v", err) + } + return &principal +} diff --git a/qubership-apihub-service/entity/ProjectIntEntity.go b/qubership-apihub-service/entity/ProjectIntEntity.go new file mode 100644 index 0000000..512a519 --- /dev/null +++ b/qubership-apihub-service/entity/ProjectIntEntity.go @@ -0,0 +1,123 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ProjectIntEntity struct { + tableName struct{} `pg:"project, alias:project"` + + Id string `pg:"id, pk, type:varchar"` + Name string `pg:"name, type:varchar"` + GroupId string `pg:"group_id, type:varchar"` + Alias string `pg:"alias, type:varchar"` + Description string `pg:"description, type:varchar"` + IntegrationType string `pg:"integration_type, type:varchar"` + RepositoryId string `pg:"repository_id, type:varchar"` + RepositoryName string `pg:"repository_name, type:varchar"` + RepositoryUrl string `pg:"repository_url, type:varchar"` + DefaultBranch string `pg:"default_branch, type:varchar"` + DefaultFolder string `pg:"default_folder, type:varchar"` + DeletedAt *time.Time `pg:"deleted_at, type:timestamp without time zone"` + DeletedBy string `pg:"deleted_by, type:varchar"` + LastVersion string `pg:"-"` + PackageId string `pg:"package_id, type:varchar"` + SecretToken string `pg:"secret_token, type:varchar"` + SecretTokenUserId string `pg:"secret_token_user_id, type:varchar"` +} + +type ProjectIntFavEntity struct { + tableName struct{} `pg:"project, alias:project"` + + ProjectIntEntity + + UserId string `pg:"user_id, pk, type:varchar"` + ProjectId string `pg:"project_id, pk, type:varchar"` +} + +func MakePrjIntEntity(pView *view.Project) *ProjectIntEntity { + return &ProjectIntEntity{ + Id: pView.Id, + Name: pView.Name, + GroupId: pView.GroupId, + Alias: pView.Alias, + Description: 
pView.Description, + IntegrationType: string(pView.Integration.Type), + RepositoryId: pView.Integration.RepositoryId, + RepositoryName: pView.Integration.RepositoryName, + RepositoryUrl: pView.Integration.RepositoryUrl, + DefaultBranch: pView.Integration.DefaultBranch, + DefaultFolder: pView.Integration.DefaultFolder, + DeletedAt: pView.DeletionDate, + DeletedBy: pView.DeletedBy, + + PackageId: pView.PackageId, + } +} + +func MakePrjIntUpdateEntity(updated *view.Project, existing *ProjectIntEntity) *ProjectIntEntity { + newPrj := ProjectIntEntity{ + Id: existing.Id, // Do not update id + Name: updated.Name, + GroupId: existing.GroupId, // Do not update parent + Alias: existing.Alias, // Do not update alias + Description: updated.Description, + RepositoryId: updated.Integration.RepositoryId, + DefaultBranch: updated.Integration.DefaultBranch, + RepositoryName: existing.RepositoryName, + RepositoryUrl: existing.RepositoryUrl, + DeletedAt: existing.DeletedAt, + DeletedBy: existing.DeletedBy, + IntegrationType: string(updated.Integration.Type), + DefaultFolder: updated.Integration.DefaultFolder, + + PackageId: updated.PackageId, + } + return &newPrj +} + +func MakeProjectView(projectEntity *ProjectIntEntity, isFavorite bool, groups []view.Group) *view.Project { + if groups == nil { + groups = make([]view.Group, 0) + } + + integrationType, _ := view.GitIntegrationTypeFromStr(projectEntity.IntegrationType) + return &view.Project{ + Id: projectEntity.Id, + GroupId: projectEntity.GroupId, + Name: projectEntity.Name, + Alias: projectEntity.Alias, + Description: projectEntity.Description, + IsFavorite: isFavorite, + Integration: view.IntegrationView{ + Type: integrationType, + RepositoryId: projectEntity.RepositoryId, + RepositoryName: projectEntity.RepositoryName, + RepositoryUrl: projectEntity.RepositoryUrl, + DefaultBranch: projectEntity.DefaultBranch, + DefaultFolder: projectEntity.DefaultFolder, + }, + Groups: groups, + DeletionDate: projectEntity.DeletedAt, + 
DeletedBy: projectEntity.DeletedBy, + LastVersion: projectEntity.LastVersion, + + PackageId: projectEntity.PackageId, + } +} diff --git a/qubership-apihub-service/entity/PublishedEntities.go b/qubership-apihub-service/entity/PublishedEntities.go new file mode 100644 index 0000000..57b5a1f --- /dev/null +++ b/qubership-apihub-service/entity/PublishedEntities.go @@ -0,0 +1,838 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +const KIND_PACKAGE = "package" +const KIND_GROUP = "group" +const KIND_WORKSPACE = "workspace" + +const KIND_DASHBOARD = "dashboard" + +type PackageEntity struct { + tableName struct{} `pg:"package_group, alias:package_group"` + + Id string `pg:"id, pk, type:varchar"` + Kind string `pg:"kind, type:varchar"` + Name string `pg:"name, type:varchar"` + ParentId string `pg:"parent_id, type:varchar"` + Alias string `pg:"alias, type:varchar"` + Description string `pg:"description, type:varchar"` + ImageUrl string `pg:"image_url, type:varchar"` + CreatedAt time.Time `pg:"created_at, type:timestamp without time zone"` + CreatedBy string `pg:"created_by, type:varchar"` + DeletedAt *time.Time `pg:"deleted_at, type:timestamp without time zone"` + DeletedBy string `pg:"deleted_by, type:varchar"` + LastVersion string `pg:"-"` + DefaultRole string `pg:"default_role, type:varchar, use_zero, default:Viewer"` + DefaultReleaseVersion string `pg:"default_released_version, type:varchar"` + ServiceName string `pg:"service_name, type:varchar"` + ReleaseVersionPattern string `pg:"release_version_pattern, type:varchar"` + ExcludeFromSearch bool `pg:"exclude_from_search, type:bool, use_zero"` + RestGroupingPrefix string `pg:"rest_grouping_prefix, type:varchar"` +} + +type PackageVersionRichEntity struct { + tableName struct{} `pg:"published_version, alias:published_version"` + + PublishedVersionEntity + PackageName string `pg:"package_name, type:varchar"` + ServiceName string `pg:"service_name, type:varchar"` + Kind string `pg:"kind, type:varchar"` + ParentNames []string `pg:"parent_names, type:varchar[]"` + NotLatestRevision bool `pg:"not_latest_revision, type:bool"` +} + +type PackageVersionRevisionEntity_deprecated struct { + tableName struct{} `pg:"published_version, alias:published_version"` + + PublishedVersionEntity + UserEntity + NotLatestRevision bool 
`pg:"not_latest_revision, type:bool"` +} + +type PackageVersionRevisionEntity struct { + tableName struct{} `pg:"published_version, alias:published_version"` + + PublishedVersionEntity + PrincipalEntity + NotLatestRevision bool `pg:"not_latest_revision, type:bool"` + PreviousVersionRevision int `pg:"previous_version_revision, type:integer"` +} + +type PackageVersionHistoryEntity struct { + tableName struct{} `pg:"published_version, alias:published_version"` + + PublishedVersionEntity + ApiTypes []string `pg:"api_types, type:varchar[]"` +} + +type PackageFavEntity struct { + tableName struct{} `pg:"package_group, alias:package_group"` + + PackageEntity + + UserId string `pg:"user_id, pk, type:varchar"` +} + +type PublishedVersionEntity struct { + tableName struct{} `pg:"published_version"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + PreviousVersion string `pg:"previous_version, type:varchar"` + PreviousVersionPackageId string `pg:"previous_version_package_id, type:varchar"` + Status string `pg:"status, type:varchar"` + PublishedAt time.Time `pg:"published_at, type:timestamp without time zone"` + DeletedAt *time.Time `pg:"deleted_at, type:timestamp without time zone"` + DeletedBy string `pg:"deleted_by, type:varchar"` + Metadata Metadata `pg:"metadata, type:jsonb"` + Labels []string `pg:"labels, type:varchar array, array"` + CreatedBy string `pg:"created_by, type:varchar"` +} + +// todo remove this entity after migration createdBy:string -> createdBy:UserObject +type ReadonlyPublishedVersionEntity_deprecated struct { + tableName struct{} `pg:"published_version, alias:published_version"` + + PublishedVersionEntity + UserName string `pg:"user_name, type:varchar"` + PreviousVersionRevision int `pg:"previous_version_revision, type:integer"` +} + +type PublishedVersionSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + 
Status string `pg:"status, type:varchar, use_zero"` + Label string `pg:"label, type:varchar, use_zero"` + TextFilter string `pg:"text_filter, type:varchar, use_zero"` + SortBy string `pg:"sort_by, type:varchar, use_zero"` + SortOrder string `pg:"sort_order, type:varchar, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` +} + +func GetVersionSortOrderPG(sortOrder string) string { + switch sortOrder { + case view.VersionSortOrderAsc: + return "asc" + case view.VersionSortOrderDesc: + return "desc" + } + return "" +} + +func GetVersionSortByPG(sortBy string) string { + switch sortBy { + case view.VersionSortByVersion: + return "version" + case view.VersionSortByCreatedAt: + return "published_at" + } + return "" +} + +type PackageVersionSearchQueryEntity struct { + PackageId string `pg:"package_id, type:varchar, use_zero"` + Version string `pg:"version, type:varchar, use_zero"` + Revision int `pg:"revision, type:integer, use_zero"` + Kind string `pg:"kind, type:varchar, use_zero"` + TextFilter string `pg:"text_filter, type:varchar, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` + ShowAllDescendants bool `pg:"show_all_descendants, type:bool, use_zero"` +} + +type PublishedShortVersionEntity struct { + tableName struct{} `pg:"published_version,discard_unknown_columns"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + Status string `pg:"status, type:varchar"` + PublishedAt time.Time `pg:"published_at, type:timestamp without time zone"` +} + +type PublishedContentEntity struct { + tableName struct{} `pg:"published_version_revision_content, alias:published_version_revision_content"` + // TODO: not sure about pk + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, 
pk, type:integer"` + FileId string `pg:"file_id, pk, type:varchar"` + Checksum string `pg:"checksum, type:varchar"` + Index int `pg:"index, type:integer, use_zero"` + Slug string `pg:"slug, type:varchar"` + Name string `pg:"name, type:varchar"` + Path string `pg:"path, type:varchar"` + DataType string `pg:"data_type, type:varchar"` + Format string `pg:"format, type:varchar"` + Title string `pg:"title, type:varchar"` + Metadata Metadata `pg:"metadata, type:jsonb"` + ReferenceId string `pg:"-"` + OperationIds []string `pg:"operation_ids, type:varchar[], array"` + Filename string `pg:"filename, type:varchar"` +} + +type PublishedContentWithDataEntity struct { + tableName struct{} `pg:"published_version_revision_content, alias:published_version_revision_content"` + PublishedContentEntity + PublishedContentDataEntity +} + +type PublishedContentDataEntity struct { + tableName struct{} `pg:"published_data"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Checksum string `pg:"checksum, pk, type:varchar"` + MediaType string `pg:"media_type, type:varchar"` + Data []byte `pg:"data, type:bytea"` +} + +type TransformedContentDataEntity struct { + tableName struct{} `pg:"transformed_content_data"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + ApiType string `pg:"api_type, pk, type:varchar"` + GroupId string `pg:"group_id, pk, type:varchar"` + BuildType string `pg:"build_type, pk, type:varchar"` + Format string `pg:"format, pk, type:varchar"` + Data []byte `pg:"data, type:bytea"` + DocumentsInfo []view.PackageDocument `pg:"documents_info, type:jsonb"` +} + +type PublishedReferenceEntity struct { + tableName struct{} `pg:"published_version_reference, alias:published_version_reference"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + RefPackageId string 
`pg:"reference_id, pk, type:varchar"` + RefVersion string `pg:"reference_version, pk, type:varchar"` + RefRevision int `pg:"reference_revision, pk, type:integer"` + ParentRefPackageId string `pg:"parent_reference_id, pk, type:varchar, use_zero"` + ParentRefVersion string `pg:"parent_reference_version, pk, type:varchar, use_zero"` + ParentRefRevision int `pg:"parent_reference_revision, pk, type:integer, use_zero"` + Excluded bool `pg:"excluded, type:boolean, use_zero"` +} + +type PublishedReferenceContainer struct { + References map[string]PublishedReferenceEntity +} + +type SharedUrlInfoEntity struct { + tableName struct{} `pg:"shared_url_info"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + FileId string `pg:"file_id, type:varchar"` // TODO: slug! + SharedId string `pg:"shared_id, pk, type:varchar"` +} + +// deprecated +type PackageVersionPublishedReference struct { + tableName struct{} `pg:"published_version_reference, alias:published_version_reference"` + PublishedReferenceEntity + PackageName string `pg:"package_name, type:varchar"` + Kind string `pg:"kind, type:varchar"` + VersionStatus string `pg:"version_status, type:varchar"` + DeletedAt *time.Time `pg:"deleted_at, type:timestamp without time zone"` + DeletedBy string `pg:"deleted_by, type:varchar"` +} + +type PublishedSrcEntity struct { + tableName struct{} `pg:"published_sources"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + Config []byte `pg:"config, type:bytea"` + Metadata []byte `pg:"metadata, type:bytea"` + ArchiveChecksum string `pg:"archive_checksum, type:varchar"` +} + +type PublishedSrcArchiveEntity struct { + tableName struct{} `pg:"published_sources_archives"` + + Checksum string `pg:"checksum, pk, type:varchar"` // sha512 + Data []byte `pg:"data, type:bytea"` +} + +type PublishedSrcDataConfigEntity struct { + PackageId string 
`pg:"package_id, pk, type:varchar"` + ArchiveChecksum string `pg:"archive_checksum, type:varchar"` + Data []byte `pg:"data, type:bytea"` + Config []byte `pg:"config, type:bytea"` +} + +type PublishedContentSearchQueryEntity struct { + TextFilter string `pg:"text_filter, type:varchar, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` + DocumentTypesFilter []string `pg:"-"` + OperationGroup string `pg:"operation_group_id, type:varchar, use_zero"` +} + +type ContentForDocumentsTransformationSearchQueryEntity struct { + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` + DocumentTypesFilter []string `pg:"-"` + OperationGroup string `pg:"operation_group_id, type:varchar, use_zero"` +} +type PackageIdEntity struct { + tableName struct{} `pg:"package_group, alias:package_group"` + + Id string `pg:"id, type:varchar"` +} + +type CSVDashboardPublishEntity struct { + tableName struct{} `pg:"csv_dashboard_publication, alias:csv_dashboard_publication"` + + PublishId string `pg:"publish_id, pk, type:varchar"` + Status string `pg:"status, type:varchar"` + Message string `pg:"message, type:varchar, use_zero"` + Report []byte `pg:"csv_report, type:bytea"` +} + +func MakePublishedReferenceView(entity PublishedReferenceEntity) view.VersionReferenceV3 { + return view.VersionReferenceV3{ + PackageRef: view.MakePackageRefKey(entity.RefPackageId, entity.RefVersion, entity.RefRevision), + ParentPackageRef: view.MakePackageRefKey(entity.ParentRefPackageId, entity.ParentRefVersion, entity.ParentRefRevision), + Excluded: entity.Excluded, + } +} + +func MakePublishedVersionView(versionEnt *PublishedVersionEntity, contentEnts []PublishedContentEntity, refs []view.PublishedRef) *view.PublishedVersion { + contents := make([]view.PublishedContent, 0) + for _, ent := range contentEnts { + contents = append(contents, *MakePublishedContentView(&ent)) + } + + status, _ := 
view.ParseVersionStatus(versionEnt.Status) + var labels []string + if versionEnt.Labels != nil { + labels = versionEnt.Labels + } else { + labels = make([]string, 0) + } + + return &view.PublishedVersion{ + PackageId: versionEnt.PackageId, + Version: versionEnt.Version, + Revision: versionEnt.Revision, + Status: status, + PublishedAt: versionEnt.PublishedAt, + PreviousVersion: versionEnt.PreviousVersion, + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + DeletedAt: versionEnt.DeletedAt, + BranchName: versionEnt.Metadata.GetBranchName(), + Contents: contents, + RelatedPackages: refs, + VersionLabels: labels, + } +} + +// todo remove this entity after migration createdBy:string -> createdBy:UserObject +func MakeReadonlyPublishedVersionListView2_deprecated(versionEnt *ReadonlyPublishedVersionEntity_deprecated) *view.PublishedVersionListView_deprecated_v2 { + return &view.PublishedVersionListView_deprecated_v2{ + Version: view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision), + Status: versionEnt.Status, + CreatedAt: versionEnt.PublishedAt, + CreatedBy: versionEnt.UserName, + PreviousVersion: view.MakeVersionRefKey(versionEnt.PreviousVersion, versionEnt.PreviousVersionRevision), + VersionLabels: versionEnt.Labels, + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + } +} + +func MakeReadonlyPublishedVersionListView2(versionEnt *PackageVersionRevisionEntity) *view.PublishedVersionListView { + item := view.PublishedVersionListView{ + Version: view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision), + Status: versionEnt.Status, + CreatedAt: versionEnt.PublishedAt, + CreatedBy: *MakePrincipalView(&versionEnt.PrincipalEntity), + PreviousVersion: view.MakeVersionRefKey(versionEnt.PreviousVersion, versionEnt.PreviousVersionRevision), + VersionLabels: versionEnt.Labels, + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + } + return &item +} + +func MakePublishedVersionHistoryView(ent PackageVersionHistoryEntity) 
view.PublishedVersionHistoryView { + return view.PublishedVersionHistoryView{ + PackageId: ent.PackageId, + Version: ent.Version, + Revision: ent.Revision, + Status: ent.Status, + PublishedAt: ent.PublishedAt, + PreviousVersionPackageId: ent.PreviousVersionPackageId, + PreviousVersion: ent.PreviousVersion, + ApiTypes: ent.ApiTypes, + } +} + +func MakePublishedVersionListView(versionEnt *PublishedVersionEntity) *view.PublishedVersionListView_deprecated { + status, _ := view.ParseVersionStatus(versionEnt.Status) + return &view.PublishedVersionListView_deprecated{ + Version: versionEnt.Version, + Revision: versionEnt.Revision, + Status: status, + PublishedAt: versionEnt.PublishedAt, + PreviousVersion: versionEnt.PreviousVersion, + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + } +} + +func MakePublishedContentView(ent *PublishedContentEntity) *view.PublishedContent { + return &view.PublishedContent{ + ContentId: ent.FileId, + Type: view.ParseTypeFromString(ent.DataType), + Format: ent.Format, + Path: ent.Path, + Name: ent.Name, + Index: ent.Index, + Slug: ent.Slug, + Labels: ent.Metadata.GetLabels(), + Title: ent.Title, + Version: ent.Version, + ReferenceId: ent.ReferenceId, + } +} + +// deprecated +func MakePublishedDocumentView_deprecated(ent *PublishedContentEntity) *view.PublishedDocument_deprecated { + return &view.PublishedDocument_deprecated{ + FieldId: ent.FileId, + Type: ent.DataType, + Format: ent.Format, + Slug: ent.Slug, + Labels: ent.Metadata.GetLabels(), + Description: ent.Metadata.GetDescription(), + Version: ent.Metadata.GetVersion(), + Info: ent.Metadata.GetInfo(), + ExternalDocs: ent.Metadata.GetExternalDocs(), + Title: ent.Title, + Filename: ent.Filename, + Tags: ent.Metadata.GetDocTags(), + } +} + +func MakePublishedDocumentView(ent *PublishedContentEntity) *view.PublishedDocument { + return &view.PublishedDocument{ + FieldId: ent.FileId, + Type: ent.DataType, + Format: ent.Format, + Slug: ent.Slug, + Labels: 
ent.Metadata.GetLabels(), + Description: ent.Metadata.GetDescription(), + Version: ent.Metadata.GetVersion(), + Info: ent.Metadata.GetInfo(), + ExternalDocs: ent.Metadata.GetExternalDocs(), + Title: ent.Title, + Filename: ent.Filename, + Tags: ent.Metadata.GetDocTags(), + } +} + +func MakeDocumentForTransformationView(ent *PublishedContentWithDataEntity) *view.DocumentForTransformationView { + return &view.DocumentForTransformationView{ + FieldId: ent.FileId, + Type: ent.DataType, + Format: ent.Format, + Slug: ent.Slug, + Labels: ent.Metadata.GetLabels(), + Description: ent.Metadata.GetDescription(), + Version: ent.Metadata.GetVersion(), + Title: ent.Title, + Filename: ent.Filename, + IncludedOperationIds: ent.OperationIds, + Data: ent.Data, + } +} + +func MakePublishedDocumentRefView2(ent *PublishedContentEntity) *view.PublishedDocumentRefView { + return &view.PublishedDocumentRefView{ + FieldId: ent.FileId, + Type: ent.DataType, + Format: ent.Format, + Slug: ent.Slug, + Labels: ent.Metadata.GetLabels(), + Description: ent.Metadata.GetDescription(), + Version: ent.Metadata.GetVersion(), + Title: ent.Title, + Filename: ent.Filename, + PackageRef: view.MakePackageRefKey(ent.PackageId, ent.Version, ent.Revision), + } +} +func MakePublishedContentChangeView(ent *PublishedContentEntity) *view.PublishedContentChange { + return &view.PublishedContentChange{ + FileId: ent.FileId, + Type: view.ParseTypeFromString(ent.DataType), + Title: ent.Title, + Slug: ent.Slug, + Checksum: ent.Checksum, + } +} + +func MakeContentDataViewPub(content *PublishedContentEntity, contentData *PublishedContentDataEntity) *view.ContentData { + return &view.ContentData{ + FileId: content.FileId, + Data: contentData.Data, + DataType: contentData.MediaType, + } +} + +func MakeSharedUrlInfo(sui *SharedUrlInfoEntity) *view.SharedUrlResult_deprecated { + return &view.SharedUrlResult_deprecated{ + SharedId: sui.SharedId, + } +} + +func MakeSharedUrlInfoV2(sui *SharedUrlInfoEntity) 
*view.SharedUrlResult {
	// Wrap the stored shared id in the v2 shared-URL result view.
	return &view.SharedUrlResult{
		SharedFileId: sui.SharedId,
	}
}

// MakePackageEntity maps a view.SimplePackage onto the package DB entity.
// Note that the view's DeletionDate feeds the entity's DeletedAt column.
//
// NOTE(review): packg.ExcludeFromSearch is dereferenced without a nil check,
// unlike MakeSimplePackageUpdateEntity below which guards every optional
// pointer — presumably callers always populate it; confirm, or this panics.
func MakePackageEntity(packg *view.SimplePackage) *PackageEntity {
	return &PackageEntity{
		Id:                    packg.Id,
		Kind:                  packg.Kind,
		Name:                  packg.Name,
		ParentId:              packg.ParentId,
		Alias:                 packg.Alias,
		Description:           packg.Description,
		ImageUrl:              packg.ImageUrl,
		DefaultRole:           packg.DefaultRole,
		CreatedAt:             packg.CreatedAt,
		CreatedBy:             packg.CreatedBy,
		DeletedAt:             packg.DeletionDate,
		DeletedBy:             packg.DeletedBy,
		DefaultReleaseVersion: packg.DefaultReleaseVersion,
		ServiceName:           packg.ServiceName,
		ReleaseVersionPattern: packg.ReleaseVersionPattern,
		ExcludeFromSearch:     *packg.ExcludeFromSearch,
		RestGroupingPrefix:    packg.RestGroupingPrefix,
	}
}

// MakeSimplePackageUpdateEntity merges a PATCH request into the existing
// package entity: identity/audit fields are copied from the stored entity
// verbatim, and each optional request field (nil pointer = "not supplied")
// overrides the stored value only when present.
func MakeSimplePackageUpdateEntity(existingPackage *PackageEntity, packg *view.PatchPackageReq) *PackageEntity {
	// Fields a PATCH may never change are seeded from the existing row.
	var packageEntity = PackageEntity{
		Id:        existingPackage.Id,
		Kind:      existingPackage.Kind,
		ParentId:  existingPackage.ParentId,
		Alias:     existingPackage.Alias,
		CreatedAt: existingPackage.CreatedAt,
		CreatedBy: existingPackage.CreatedBy,
		DeletedAt: existingPackage.DeletedAt,
		DeletedBy: existingPackage.DeletedBy,
	}
	if packg.Name != nil {
		packageEntity.Name = *packg.Name
	} else {
		packageEntity.Name = existingPackage.Name
	}
	if packg.Description != nil {
		packageEntity.Description = *packg.Description
	} else {
		packageEntity.Description = existingPackage.Description
	}
	if packg.ImageUrl != nil {
		packageEntity.ImageUrl = *packg.ImageUrl
	} else {
		packageEntity.ImageUrl = existingPackage.ImageUrl
	}
	if packg.ServiceName != nil {
		packageEntity.ServiceName = *packg.ServiceName
	} else {
		packageEntity.ServiceName = existingPackage.ServiceName
	}
	if packg.DefaultRole != nil {
		packageEntity.DefaultRole = *packg.DefaultRole
	} else {
		packageEntity.DefaultRole = existingPackage.DefaultRole
	}
	if packg.DefaultReleaseVersion != nil {
		packageEntity.DefaultReleaseVersion
= *packg.DefaultReleaseVersion + } else { + packageEntity.DefaultReleaseVersion = existingPackage.DefaultReleaseVersion + } + if packg.ReleaseVersionPattern != nil { + packageEntity.ReleaseVersionPattern = *packg.ReleaseVersionPattern + } else { + packageEntity.ReleaseVersionPattern = existingPackage.ReleaseVersionPattern + } + if packg.ExcludeFromSearch != nil { + packageEntity.ExcludeFromSearch = *packg.ExcludeFromSearch + } else { + packageEntity.ExcludeFromSearch = existingPackage.ExcludeFromSearch + } + if packg.RestGroupingPrefix != nil { + packageEntity.RestGroupingPrefix = *packg.RestGroupingPrefix + } else { + packageEntity.RestGroupingPrefix = existingPackage.RestGroupingPrefix + } + return &packageEntity +} + +func MakePackageGroupEntity(group *view.Group) *PackageEntity { + kind := KIND_GROUP + if group.ParentId == "" { + kind = KIND_WORKSPACE + } + return &PackageEntity{ + Id: group.Id, + Kind: kind, + Name: group.Name, + ParentId: group.ParentId, + Alias: group.Alias, + Description: group.Description, + ImageUrl: group.ImageUrl, + CreatedAt: group.CreatedAt, + CreatedBy: group.CreatedBy, + DeletedAt: group.DeletedAt, + DeletedBy: group.DeletedBy, + DefaultRole: view.ViewerRoleId, //todo remove after full v2 migration + } +} + +func MakePackageGroupUpdateEntity(existingGroup *PackageEntity, group *view.Group) *PackageEntity { + kind := KIND_GROUP + if existingGroup.ParentId == "" { + kind = KIND_WORKSPACE + } + return &PackageEntity{ + Id: existingGroup.Id, + Kind: kind, + Name: group.Name, + ParentId: existingGroup.ParentId, + Alias: existingGroup.Alias, + Description: group.Description, + ImageUrl: group.ImageUrl, + CreatedAt: existingGroup.CreatedAt, + CreatedBy: existingGroup.CreatedBy, + DeletedAt: existingGroup.DeletedAt, + DeletedBy: existingGroup.DeletedBy, + DefaultRole: view.ViewerRoleId, //todo remove after full v2 migration + ServiceName: existingGroup.ServiceName, + } +} + +func MakePackageGroupView(entity *PackageEntity) *view.Group { + 
return &view.Group{ + Id: entity.Id, + ParentId: entity.ParentId, + Name: entity.Name, + Alias: entity.Alias, + Description: entity.Description, + ImageUrl: entity.ImageUrl, + CreatedAt: entity.CreatedAt, + CreatedBy: entity.CreatedBy, + DeletedAt: entity.DeletedAt, + DeletedBy: entity.DeletedBy, + LastVersion: entity.LastVersion, + } +} + +func MakePackageGroupFavView(entity *PackageFavEntity) *view.Group { + view := MakePackageGroupView(&entity.PackageEntity) + view.IsFavorite = entity.UserId != "" && entity.Id != "" + return view +} + +func MakePackageGroupInfoView(entity *PackageEntity, parents []view.Group, isFavorite bool) *view.GroupInfo { + var parentsRes []view.Group + if parents == nil { + parentsRes = make([]view.Group, 0) + } else { + parentsRes = parents + } + + return &view.GroupInfo{ + GroupId: entity.Id, + ParentId: entity.ParentId, + Name: entity.Name, + Alias: entity.Alias, + ImageUrl: entity.ImageUrl, + Parents: parentsRes, + IsFavorite: isFavorite, + LastVersion: entity.LastVersion, + } +} +func MakeSimplePackageView(entity *PackageEntity, parents []view.ParentPackageInfo, isFavorite bool, userPermissions []string) *view.SimplePackage { + var parentsRes []view.ParentPackageInfo + if parents == nil { + parentsRes = make([]view.ParentPackageInfo, 0) + } else { + parentsRes = parents + } + + return &view.SimplePackage{ + Id: entity.Id, + ParentId: entity.ParentId, + Name: entity.Name, + Alias: entity.Alias, + ImageUrl: entity.ImageUrl, + Parents: parentsRes, + IsFavorite: isFavorite, + ServiceName: entity.ServiceName, + Description: entity.Description, + Kind: entity.Kind, + DefaultRole: entity.DefaultRole, + UserPermissions: userPermissions, + DefaultReleaseVersion: entity.DefaultReleaseVersion, + ReleaseVersionPattern: entity.ReleaseVersionPattern, + ExcludeFromSearch: &entity.ExcludeFromSearch, + RestGroupingPrefix: entity.RestGroupingPrefix, + } +} + +func MakePackagesInfo(entity *PackageEntity, defaultVersionDetails *view.VersionDetails, 
parents []view.ParentPackageInfo, isFavorite bool, userPermissions []string) *view.PackagesInfo { + var parentsRes []view.ParentPackageInfo + if parents == nil { + parentsRes = make([]view.ParentPackageInfo, 0) + } else { + parentsRes = parents + } + + packageInfo := view.PackagesInfo{ + Id: entity.Id, + ParentId: entity.ParentId, + Name: entity.Name, + Alias: entity.Alias, + ImageUrl: entity.ImageUrl, + Parents: parentsRes, + IsFavorite: isFavorite, + ServiceName: entity.ServiceName, + Description: entity.Description, + Kind: entity.Kind, + DefaultRole: entity.DefaultRole, + UserPermissions: userPermissions, + LastReleaseVersionDetails: defaultVersionDetails, + RestGroupingPrefix: entity.RestGroupingPrefix, + ReleaseVersionPattern: entity.ReleaseVersionPattern, + } + + return &packageInfo +} + +func MakePackageView(packageEntity *PackageEntity, isFavorite bool, groups []view.Group) *view.Package { + if groups == nil { + groups = make([]view.Group, 0) + } + return &view.Package{ + Id: packageEntity.Id, + GroupId: packageEntity.ParentId, + Name: packageEntity.Name, + Alias: packageEntity.Alias, + Description: packageEntity.Description, + IsFavorite: isFavorite, + Groups: groups, + DeletionDate: packageEntity.DeletedAt, + DeletedBy: packageEntity.DeletedBy, + ServiceName: packageEntity.ServiceName, + LastVersion: packageEntity.LastVersion, + } +} + +func MakePackageParentView(entity *PackageEntity) *view.ParentPackageInfo { + return &view.ParentPackageInfo{ + Id: entity.Id, + ParentId: entity.ParentId, + Name: entity.Name, + Alias: entity.Alias, + ImageUrl: entity.ImageUrl, + Kind: entity.Kind, + } +} + +func MakePackageUpdateEntity(existingEntity *PackageEntity, packageView *view.Package) *PackageEntity { + return &PackageEntity{ + Id: existingEntity.Id, + Kind: KIND_PACKAGE, + Name: packageView.Name, + ParentId: existingEntity.ParentId, + Alias: existingEntity.Alias, + Description: packageView.Description, + CreatedAt: existingEntity.CreatedAt, + CreatedBy: 
existingEntity.CreatedBy, + DeletedAt: existingEntity.DeletedAt, + DeletedBy: existingEntity.DeletedBy, + DefaultRole: view.ViewerRoleId, //todo remove after full v2 migration + ServiceName: packageView.ServiceName, + } +} + +func MakePackageVersionRef(entity *PackageVersionRichEntity) view.PackageVersionRef { + return view.PackageVersionRef{ + RefPackageId: entity.PackageId, + RefPackageName: entity.PackageName, + RefPackageVersion: view.MakeVersionRefKey(entity.Version, entity.Revision), + Kind: entity.Kind, + Status: entity.Status, + DeletedAt: entity.DeletedAt, + DeletedBy: entity.DeletedBy, + ParentNames: entity.ParentNames, + ServiceName: entity.ServiceName, + NotLatestRevision: entity.NotLatestRevision, + } +} + +func MakePackageVersionRevisionView_deprecated(ent *PackageVersionRevisionEntity_deprecated) *view.PackageVersionRevision_deprecated { + packageVersionRevision := view.PackageVersionRevision_deprecated{ + Version: view.MakeVersionRefKey(ent.Version, ent.Revision), + Revision: ent.Revision, + Status: ent.Status, + CreatedAt: ent.PublishedAt, + CreatedBy: *MakeUserV2View(&ent.UserEntity), + RevisionLabels: ent.Labels, + PublishMeta: view.BuildConfigMetadata{ + BranchName: ent.Metadata.GetBranchName(), + RepositoryUrl: ent.Metadata.GetRepositoryUrl(), + CloudName: ent.Metadata.GetCloudName(), + CloudUrl: ent.Metadata.GetCloudUrl(), + Namespace: ent.Metadata.GetNamespace(), + }, + NotLatestRevision: ent.NotLatestRevision, + } + return &packageVersionRevision +} + +func MakePackageVersionRevisionView(ent *PackageVersionRevisionEntity) *view.PackageVersionRevision { + packageVersionRevision := view.PackageVersionRevision{ + Version: view.MakeVersionRefKey(ent.Version, ent.Revision), + Revision: ent.Revision, + Status: ent.Status, + CreatedAt: ent.PublishedAt, + CreatedBy: *MakePrincipalView(&ent.PrincipalEntity), + RevisionLabels: ent.Labels, + PublishMeta: view.BuildConfigMetadata{ + BranchName: ent.Metadata.GetBranchName(), + RepositoryUrl: 
ent.Metadata.GetRepositoryUrl(), + CloudName: ent.Metadata.GetCloudName(), + CloudUrl: ent.Metadata.GetCloudUrl(), + Namespace: ent.Metadata.GetNamespace(), + }, + NotLatestRevision: ent.NotLatestRevision, + } + return &packageVersionRevision +} diff --git a/qubership-apihub-service/entity/SearchEntity.go b/qubership-apihub-service/entity/SearchEntity.go new file mode 100644 index 0000000..4433852 --- /dev/null +++ b/qubership-apihub-service/entity/SearchEntity.go @@ -0,0 +1,445 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package entity

import (
	"strings"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// VersionStatusSearchWeight carries the per-version-status ranking weights
// bound into the search queries (the pg tags name the bind parameters; each
// status string is paired with its weight).
type VersionStatusSearchWeight struct {
	VersionReleaseStatus        string  `pg:"version_status_release, type:varchar, use_zero"`
	VersionReleaseStatusWeight  float64 `pg:"version_status_release_weight, type:real, use_zero"`
	VersionDraftStatus          string  `pg:"version_status_draft, type:varchar, use_zero"`
	VersionDraftStatusWeight    float64 `pg:"version_status_draft_weight, type:real, use_zero"`
	VersionArchivedStatus       string  `pg:"version_status_archived, type:varchar, use_zero"`
	VersionArchivedStatusWeight float64 `pg:"version_status_archived_weight, type:real, use_zero"`
}

// OperationSearchWeight holds the ranking weights specific to operation search.
type OperationSearchWeight struct {
	ScopeWeight     float64 `pg:"scope_weight, type:real, use_zero"`
	TitleWeight     float64 `pg:"title_weight, type:real, use_zero"`
	OpenCountWeight float64 `pg:"open_count_weight, type:real, use_zero"`
}

// OperationSearchScopeFilter is a set of boolean flags selecting which parts
// of an operation are searched. FilterAll presumably supersedes the
// individual flags — confirm against the query that consumes these binds.
type OperationSearchScopeFilter struct {
	FilterAll bool `pg:"filter_all, type:boolean, use_zero"`

	FilterRequest    bool `pg:"filter_request, type:boolean, use_zero"`
	FilterResponse   bool `pg:"filter_response, type:boolean, use_zero"`
	FilterAnnotation bool `pg:"filter_annotation, type:boolean, use_zero"`
	FilterExamples   bool `pg:"filter_examples, type:boolean, use_zero"`
	FilterProperties bool `pg:"filter_properties, type:boolean, use_zero"`
	FilterProperty   bool `pg:"filter_property, type:boolean, use_zero"`
	FilterArgument   bool `pg:"filter_argument, type:boolean, use_zero"`
}

// OperationSearchQuery aggregates every bind parameter of the operation
// search SQL: scope filters, ranking weights and user-supplied criteria.
type OperationSearchQuery struct {
	OperationSearchScopeFilter
	OperationSearchWeight
	VersionStatusSearchWeight
	SearchString string   `pg:"search_filter, type:varchar, use_zero"` // for postgres FTS (tsquery form)
	TextFilter   string   `pg:"text_filter, type:varchar, use_zero"`   // raw string, for varchar matching
	ApiType      string   `pg:"api_type, type:varchar, use_zero"`
	Packages     []string `pg:"packages, type:varchar[], use_zero"`
	Versions     []string `pg:"versions, 
type:varchar[], use_zero"`
	Statuses       []string  `pg:"statuses, type:varchar[], use_zero"`
	Methods        []string  `pg:"methods, type:varchar[], use_zero"`
	OperationTypes []string  `pg:"operation_types, type:varchar[], use_zero"`
	StartDate      time.Time `pg:"start_date, type:timestamp without time zone, use_zero"`
	EndDate        time.Time `pg:"end_date, type:timestamp without time zone, use_zero"`
	Limit          int       `pg:"limit, type:integer, use_zero"`
	Offset         int       `pg:"offset, type:integer, use_zero"`

	RestApiType    string `pg:"rest_api_type, type:varchar, use_zero"`
	GraphqlApiType string `pg:"graphql_api_type, type:varchar, use_zero"`
}

// Deprecated: superseded by OperationSearchResult.
// OperationSearchResult_deprecated is one row of the legacy operation search
// query, including the raw ranking factors used for debugging.
type OperationSearchResult_deprecated struct {
	tableName struct{} `pg:",discard_unknown_columns"`

	PackageId   string `pg:"package_id, type:varchar"`
	PackageName string `pg:"name, type:varchar"`
	Version     string `pg:"version, type:varchar"`
	// Fix: tag was `pg:"revision type:integer"` — the missing comma makes the
	// whole string the column name instead of "revision" plus a type option
	// (every sibling field uses the comma-separated form).
	Revision      int      `pg:"revision, type:integer"`
	VersionStatus string   `pg:"status, type:varchar"`
	OperationId   string   `pg:"operation_id, type:varchar"`
	Title         string   `pg:"title, type:varchar"`
	Deprecated    bool     `pg:"deprecated, type:boolean"`
	ApiType       string   `pg:"api_type, type:varchar"`
	Metadata      Metadata `pg:"metadata, type:jsonb"`
	ParentNames   []string `pg:"parent_names, type:varchar[]"`

	//debug
	ScopeWeight        float64 `pg:"scope_weight, type:real"`
	ScopeTf            float64 `pg:"scope_tf, type:real"`
	TitleTf            float64 `pg:"title_tf, type:real"`
	VersionStatusTf    float64 `pg:"version_status_tf, type:real"`
	OpenCountWeight    float64 `pg:"open_count_weight, type:real"`
	OperationOpenCount float64 `pg:"operation_open_count, type:real"`
}

// OperationSearchResult is one row of the operation search query: the full
// operation entity plus package/version context and debug ranking factors.
type OperationSearchResult struct {
	tableName struct{} `pg:",discard_unknown_columns"`

	OperationEntity
	PackageName   string   `pg:"name, type:varchar"`
	VersionStatus string   `pg:"status, type:varchar"`
	ParentNames   []string `pg:"parent_names, type:varchar[]"`

	//debug
	ScopeWeight float64 `pg:"scope_weight, type:real"`
	ScopeTf     float64 
`pg:"scope_tf, type:real"`
	TitleTf            float64 `pg:"title_tf, type:real"`
	VersionStatusTf    float64 `pg:"version_status_tf, type:real"`
	OpenCountWeight    float64 `pg:"open_count_weight, type:real"`
	OperationOpenCount float64 `pg:"operation_open_count, type:real"`
}

// MakeOperationSearchQueryEntity converts an operation search request into
// the bind-parameter entity for the search SQL. The error result is always
// nil in the current implementation.
func MakeOperationSearchQueryEntity(searchQuery *view.SearchQueryReq) (*OperationSearchQuery, error) {

	//todo probably need to replace more symbols
	// Build the postgres FTS search string: spaces and common word
	// separators (/, _, -) become AND (&) operators, stray leading/trailing
	// operators are trimmed, any user-typed ":*" is stripped, and a single
	// ":*" is appended so the last token is a prefix ("starts with") match.
	// NOTE(review): an empty SearchString yields just ":*" — presumably the
	// caller validates non-empty input; confirm.
	ftsSearchString := searchQuery.SearchString
	ftsSearchString = strings.ReplaceAll(ftsSearchString, " ", " & ")
	ftsSearchString = strings.ReplaceAll(ftsSearchString, "/", " & ")
	ftsSearchString = strings.ReplaceAll(ftsSearchString, "_", " & ")
	ftsSearchString = strings.ReplaceAll(ftsSearchString, "-", " & ")
	ftsSearchString = strings.TrimSpace(ftsSearchString)
	ftsSearchString = strings.Trim(ftsSearchString, "&")
	ftsSearchString = strings.Trim(ftsSearchString, "|")
	ftsSearchString = strings.ReplaceAll(ftsSearchString, ":*", "")
	ftsSearchString = strings.TrimSpace(ftsSearchString) + ":*" //starts with

	searchQueryEntity := &OperationSearchQuery{
		SearchString:   ftsSearchString,
		TextFilter:     searchQuery.SearchString,
		Packages:       searchQuery.PackageIds,
		Versions:       searchQuery.Versions,
		Statuses:       searchQuery.Statuses,
		StartDate:      searchQuery.PublicationDateInterval.StartDate,
		EndDate:        searchQuery.PublicationDateInterval.EndDate,
		Methods:        make([]string, 0),
		OperationTypes: make([]string, 0),
		Limit:          searchQuery.Limit,
		// Page is a page index; the SQL offset is derived from it.
		Offset:         searchQuery.Limit * searchQuery.Page,
		RestApiType:    string(view.RestApiType),
		GraphqlApiType: string(view.GraphqlApiType),
	}
	// Normalize nil slices and zero dates so every bind parameter has a
	// usable value (use_zero tags expect concrete values, not NULL).
	if searchQueryEntity.Packages == nil {
		searchQueryEntity.Packages = make([]string, 0)
	}
	if searchQueryEntity.Versions == nil {
		searchQueryEntity.Versions = make([]string, 0)
	}
	if searchQueryEntity.Statuses == nil {
		searchQueryEntity.Statuses = make([]string, 0)
	}
	if searchQueryEntity.StartDate.IsZero() {
		searchQueryEntity.StartDate = time.Unix(0, 0) 
	//January 1, 1970
	}
	if searchQueryEntity.EndDate.IsZero() {
		searchQueryEntity.EndDate = time.Unix(2556057600, 0) //December 31, 2050
	}
	return searchQueryEntity, nil
}

// Deprecated: use MakeOperationSearchResultView instead.
// MakeOperationSearchResultView_deprecated maps a legacy search row to the
// legacy result view, attaching API-type-specific metadata.
func MakeOperationSearchResultView_deprecated(ent OperationSearchResult_deprecated) view.OperationSearchResult_deprecated {
	operationSearchResult := view.OperationSearchResult_deprecated{
		PackageId:      ent.PackageId,
		PackageName:    ent.PackageName,
		ParentPackages: ent.ParentNames,
		Version:        view.MakeVersionRefKey(ent.Version, ent.Revision),
		VersionStatus:  ent.VersionStatus,
		OperationId:    ent.OperationId,
		Title:          ent.Title,
		Deprecated:     ent.Deprecated,
		ApiType:        ent.ApiType,

		//debug
		Debug: view.OperationSearchWeightsDebug{
			ScopeWeight:              ent.ScopeWeight,
			ScopeTf:                  ent.ScopeTf,
			TitleTf:                  ent.TitleTf,
			VersionStatusTf:          ent.VersionStatusTf,
			OperationOpenCountWeight: ent.OpenCountWeight,
			OperationOpenCount:       ent.OperationOpenCount,
		},
	}

	// Attach per-API-type metadata: REST gets path/method, GraphQL gets
	// type/method. Unknown api types leave Metadata unset.
	switch operationSearchResult.ApiType {
	case string(view.RestApiType):
		restOperationChange := view.RestOperationChange{
			Path:   ent.Metadata.GetPath(),
			Method: ent.Metadata.GetMethod(),
		}
		operationSearchResult.Metadata = restOperationChange
	case string(view.GraphqlApiType):
		graphQLOperationMetadata := view.GraphQLOperationMetadata{
			Type:   ent.Metadata.GetType(),
			Method: ent.Metadata.GetMethod(),
		}
		operationSearchResult.Metadata = graphQLOperationMetadata
	}
	return operationSearchResult
}

// MakeOperationSearchResultView maps a search row into an API-type-specific
// search result view built around the shared common fields.
func MakeOperationSearchResultView(ent OperationSearchResult) interface{} {
	operationSearchResult := view.CommonOperationSearchResult{
		PackageId:      ent.PackageId,
		PackageName:    ent.PackageName,
		ParentPackages: ent.ParentNames,
		VersionStatus:  ent.VersionStatus,
		Version:        view.MakeVersionRefKey(ent.Version, ent.Revision),
		Title:          ent.Title,

		//debug
		Debug: view.OperationSearchWeightsDebug{
			ScopeWeight:     ent.ScopeWeight,
			ScopeTf:         ent.ScopeTf,
			TitleTf:         ent.TitleTf,
			VersionStatusTf: 
ent.VersionStatusTf, + OperationOpenCountWeight: ent.OpenCountWeight, + OperationOpenCount: ent.OperationOpenCount, + }, + } + + switch ent.Type { + case string(view.RestApiType): + return view.RestOperationSearchResult{ + CommonOperationSearchResult: operationSearchResult, + RestOperationView: MakeRestOperationView(&ent.OperationEntity), + } + case string(view.GraphqlApiType): + return view.GraphQLOperationSearchResult{ + CommonOperationSearchResult: operationSearchResult, + GraphQLOperationView: MakeGraphQLOperationView(&ent.OperationEntity), + } + } + return operationSearchResult +} + +type PackageSearchWeight struct { + PackageNameWeight float64 `pg:"pkg_name_weight, type:real, use_zero"` + PackageDescriptionWeight float64 `pg:"pkg_description_weight, type:real, use_zero"` + PackageIdWeight float64 `pg:"pkg_id_weight, type:real, use_zero"` + PackageServiceNameWeight float64 `pg:"pkg_service_name_weight, type:real, use_zero"` + VersionWeight float64 `pg:"version_weight, type:real, use_zero"` + VersionLabelWeight float64 `pg:"version_label_weight, type:real, use_zero"` + DefaultVersionWeight float64 `pg:"default_version_weight, type:real, use_zero"` + OpenCountWeight float64 `pg:"open_count_weight, type:real, use_zero"` +} + +type PackageSearchQuery struct { + PackageSearchWeight + VersionStatusSearchWeight + TextFilter string `pg:"text_filter, type:varchar, use_zero"` //for varchar + Packages []string `pg:"packages, type:varchar[], use_zero"` + Versions []string `pg:"versions, type:varchar[], use_zero"` + Statuses []string `pg:"statuses, type:varchar[], use_zero"` + StartDate time.Time `pg:"start_date, type:timestamp without time zone, use_zero"` + EndDate time.Time `pg:"end_date, type:timestamp without time zone, use_zero"` + Limit int `pg:"limit, type:integer, use_zero"` + Offset int `pg:"offset, type:integer, use_zero"` +} + +type PackageSearchResult struct { + tableName struct{} `pg:",discard_unknown_columns"` + + PackageId string `pg:"package_id, 
type:varchar"` + PackageName string `pg:"name, type:varchar"` + PackageDescription string `pg:"description, type:varchar"` + PackageServiceName string `pg:"service_name, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + VersionStatus string `pg:"status, type:varchar"` + CreatedAt time.Time `pg:"created_at, type:timestamp without time zone"` + Labels []string `pg:"labels, type:varchar[], array"` + LatestRevision bool `pg:"latest_revision, type:boolean"` + ParentNames []string `pg:"parent_names, type:varchar[]"` + + //debug + PackageIdTf float64 `pg:"pkg_id_tf, type:real"` + PackageNameTf float64 `pg:"pkg_name_tf, type:real"` + PackageDescriptionTf float64 `pg:"pkg_description_tf, type:real"` + PackageServiceNameTf float64 `pg:"pkg_service_name_tf, type:real"` + VersionTf float64 `pg:"version_tf, type:real"` + VersionLabelsTf float64 `pg:"version_labels_tf, type:real"` + DefaultVersionTf float64 `pg:"default_version_tf, type:real"` + VersionStatusTf float64 `pg:"version_status_tf, type:real"` + OpenCountWeight float64 `pg:"open_count_weight, type:real"` + VersionOpenCount float64 `pg:"version_open_count, type:real"` +} + +func MakePackageSearchQueryEntity(searchQuery *view.SearchQueryReq) (*PackageSearchQuery, error) { + searchQueryEntity := &PackageSearchQuery{ + TextFilter: searchQuery.SearchString, + Packages: searchQuery.PackageIds, + Versions: searchQuery.Versions, + Statuses: searchQuery.Statuses, + StartDate: searchQuery.PublicationDateInterval.StartDate, + EndDate: searchQuery.PublicationDateInterval.EndDate, + Limit: searchQuery.Limit, + Offset: searchQuery.Limit * searchQuery.Page, + } + if searchQueryEntity.Packages == nil { + searchQueryEntity.Packages = make([]string, 0) + } + if searchQueryEntity.Versions == nil { + searchQueryEntity.Versions = make([]string, 0) + } + if searchQueryEntity.Statuses == nil { + searchQueryEntity.Statuses = make([]string, 0) + } + if 
searchQueryEntity.StartDate.IsZero() { + searchQueryEntity.StartDate = time.Unix(0, 0) //January 1, 1970 + } + if searchQueryEntity.EndDate.IsZero() { + searchQueryEntity.EndDate = time.Unix(2556057600, 0) //December 31, 2050 + } + return searchQueryEntity, nil +} + +func MakePackageSearchResultView(ent PackageSearchResult) *view.PackageSearchResult { + return &view.PackageSearchResult{ + PackageId: ent.PackageId, + PackageName: ent.PackageName, + Description: ent.PackageDescription, + ServiceName: ent.PackageServiceName, + ParentPackages: ent.ParentNames, + Version: view.MakeVersionRefKey(ent.Version, ent.Revision), + VersionStatus: ent.VersionStatus, + CreatedAt: ent.CreatedAt, + Labels: ent.Labels, + LatestRevision: ent.LatestRevision, + + //debug + Debug: view.PackageSearchWeightsDebug{ + PackageIdTf: ent.PackageIdTf, + PackageNameTf: ent.PackageNameTf, + PackageDescriptionTf: ent.PackageDescriptionTf, + PackageServiceNameTf: ent.PackageServiceNameTf, + VersionTf: ent.VersionTf, + VersionLabelsTf: ent.VersionLabelsTf, + DefaultVersionTf: ent.DefaultVersionTf, + VersionStatusTf: ent.VersionStatusTf, + VersionOpenCountWeight: ent.OpenCountWeight, + VersionOpenCount: ent.VersionOpenCount, + }, + } +} + +type DocumentSearchWeight struct { + TitleWeight float64 `pg:"title_weight, type:real, use_zero"` + LabelsWeight float64 `pg:"labels_weight, type:real, use_zero"` + ContentWeight float64 `pg:"content_weight, type:real, use_zero"` + OpenCountWeight float64 `pg:"open_count_weight, type:real, use_zero"` +} + +type DocumentSearchQuery struct { + DocumentSearchWeight + VersionStatusSearchWeight + TextFilter string `pg:"text_filter, type:varchar, use_zero"` //for varchar + Packages []string `pg:"packages, type:varchar[], use_zero"` + Versions []string `pg:"versions, type:varchar[], use_zero"` + Statuses []string `pg:"statuses, type:varchar[], use_zero"` + StartDate time.Time `pg:"start_date, type:timestamp without time zone, use_zero"` + EndDate time.Time `pg:"end_date, 
type:timestamp without time zone, use_zero"`
	Limit        int      `pg:"limit, type:integer, use_zero"`
	Offset       int      `pg:"offset, type:integer, use_zero"`
	UnknownTypes []string `pg:"unknown_types, type:varchar[], use_zero"`
}

// DocumentSearchResult is one row of the document search query: document
// identity and metadata plus the raw ranking factors used for debugging.
type DocumentSearchResult struct {
	tableName struct{} `pg:",discard_unknown_columns"`

	PackageId   string `pg:"package_id, type:varchar"`
	PackageName string `pg:"name, type:varchar"`
	Version     string `pg:"version, type:varchar"`
	// Fix: tag was `pg:"revision type:integer"` — the missing comma makes the
	// whole string the column name instead of "revision" plus a type option
	// (compare PackageSearchResult, which uses the comma-separated form).
	Revision      int       `pg:"revision, type:integer"`
	VersionStatus string    `pg:"status, type:varchar"`
	CreatedAt     time.Time `pg:"created_at, type:timestamp without time zone"`
	Slug          string    `pg:"slug, type:varchar"`
	Title         string    `pg:"title, type:varchar"`
	Type          string    `pg:"type, type:varchar"`
	Metadata      Metadata  `pg:"metadata, type:jsonb"`
	ParentNames   []string  `pg:"parent_names, type:varchar[]"`

	//debug
	TitleTf           float64 `pg:"title_tf, type:real"`
	LabelsTf          float64 `pg:"labels_tf, type:real"`
	ContentTf         float64 `pg:"content_tf, type:real"`
	VersionStatusTf   float64 `pg:"version_status_tf, type:real"`
	OpenCountWeight   float64 `pg:"open_count_weight, type:real"`
	DocumentOpenCount float64 `pg:"document_open_count, type:real"`
}

// MakeDocumentSearchQueryEntity converts a document search request (plus the
// list of "unknown" document types to treat specially) into the
// bind-parameter entity for the document search SQL. The error result is
// always nil in the current implementation.
func MakeDocumentSearchQueryEntity(searchQuery *view.SearchQueryReq, unknownTypes []string) (*DocumentSearchQuery, error) {
	searchQueryEntity := &DocumentSearchQuery{
		TextFilter:   searchQuery.SearchString,
		Packages:     searchQuery.PackageIds,
		Versions:     searchQuery.Versions,
		Statuses:     searchQuery.Statuses,
		StartDate:    searchQuery.PublicationDateInterval.StartDate,
		EndDate:      searchQuery.PublicationDateInterval.EndDate,
		Limit:        searchQuery.Limit,
		// Page is a page index; the SQL offset is derived from it.
		Offset:       searchQuery.Limit * searchQuery.Page,
		UnknownTypes: unknownTypes,
	}
	// Normalize nil slices so every array bind parameter has a value.
	if searchQueryEntity.Packages == nil {
		searchQueryEntity.Packages = make([]string, 0)
	}
	if searchQueryEntity.Versions == nil {
		searchQueryEntity.Versions = make([]string, 0)
	}
	if searchQueryEntity.Statuses == nil {
		searchQueryEntity.Statuses 
= make([]string, 0) + } + if searchQueryEntity.StartDate.IsZero() { + searchQueryEntity.StartDate = time.Unix(0, 0) //January 1, 1970 + } + if searchQueryEntity.EndDate.IsZero() { + searchQueryEntity.EndDate = time.Unix(2556057600, 0) //December 31, 2050 + } + return searchQueryEntity, nil +} + +func MakeDocumentSearchResultView(ent DocumentSearchResult, content string) *view.DocumentSearchResult { + return &view.DocumentSearchResult{ + PackageId: ent.PackageId, + PackageName: ent.PackageName, + ParentPackages: ent.ParentNames, + Version: view.MakeVersionRefKey(ent.Version, ent.Revision), + VersionStatus: ent.VersionStatus, + CreatedAt: ent.CreatedAt, + Slug: ent.Slug, + Type: ent.Type, + Title: ent.Title, + Content: content, + Labels: ent.Metadata.GetLabels(), + //debug + Debug: view.DocumentSearchWeightsDebug{ + TitleTf: ent.TitleTf, + LabelsTf: ent.LabelsTf, + ContentTf: ent.ContentTf, + VersionStatusTf: ent.VersionStatusTf, + DocumentOpenCountWeight: ent.OpenCountWeight, + DocumentOpenCount: ent.DocumentOpenCount, + }, + } +} diff --git a/qubership-apihub-service/entity/SystemRoleEntity.go b/qubership-apihub-service/entity/SystemRoleEntity.go new file mode 100644 index 0000000..ac822f2 --- /dev/null +++ b/qubership-apihub-service/entity/SystemRoleEntity.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
// SystemRoleEntity maps a user to a system-wide (instance-level) role.
// Backed by the "system_role" table.
type SystemRoleEntity struct {
	tableName struct{} `pg:"system_role"`

	// UserId is the primary key; one system role per user.
	UserId string `pg:"user_id, pk, type:varchar"`
	// Role is persisted even when empty (use_zero).
	Role string `pg:"role, use_zero, type:varchar"`
}
+ +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type TransitionActivityEntity struct { + tableName struct{} `pg:"activity_tracking_transition"` + + Id string `pg:"id, pk, type:varchar"` + TrType string `pg:"tr_type, type:varchar"` + FromId string `pg:"from_id, type:varchar"` + ToId string `pg:"to_id, type:varchar"` + Status string `pg:"status, type:varchar"` + Details string `pg:"details, type:varchar"` + StartedBy string `pg:"started_by, type:varchar"` + StartedAt time.Time `pg:"started_at, type:timestamp without time zone"` + FinishedAt time.Time `pg:"finished_at, type:timestamp without time zone"` + ProgressPercent int `pg:"progress_percent, type:integer"` + AffectedObjects int `pg:"affected_objects, type:integer"` + CompletedSerialNumber *int `pg:"completed_serial_number, type:integer"` +} + +type PackageTransitionEntity struct { + tableName struct{} `pg:"package_transition"` + + OldPackageId string `pg:"old_package_id, type:varchar"` + NewPackageId string `pg:"new_package_id, type:varchar"` +} + +func MakeTransitionStatusView(ent *TransitionActivityEntity) *view.TransitionStatus { + result := &view.TransitionStatus{ + Id: ent.Id, + TrType: ent.TrType, + FromId: ent.FromId, + ToId: ent.ToId, + Status: ent.Status, + Details: ent.Details, + StartedBy: ent.StartedBy, + StartedAt: ent.StartedAt, + FinishedAt: ent.FinishedAt, + ProgressPercent: ent.ProgressPercent, + AffectedObjects: ent.AffectedObjects, + CompletedSerialNumber: ent.CompletedSerialNumber, + } + return result +} + +func MakePackageTransitionView(ent *PackageTransitionEntity) *view.PackageTransition { + return &view.PackageTransition{ + OldPackageId: ent.OldPackageId, + NewPackageId: ent.NewPackageId, + } +} diff --git a/qubership-apihub-service/entity/UserAvatarEntity.go b/qubership-apihub-service/entity/UserAvatarEntity.go new file mode 100644 index 0000000..2fd3755 --- /dev/null +++ 
b/qubership-apihub-service/entity/UserAvatarEntity.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + +type UserAvatarEntity struct { + tableName struct{} `pg:"user_avatar_data"` + + Id string `pg:"user_id, pk, type:varchar"` + Avatar []byte `pg:"avatar, type:bytea"` + Checksum [32]byte `pg:"checksum, type:bytea"` +} + +func MakeUserAvatarEntity(avatarView *view.UserAvatar) *UserAvatarEntity { + return &UserAvatarEntity{ + Id: avatarView.Id, + Avatar: avatarView.Avatar, + Checksum: avatarView.Checksum, + } +} + +func MakeUserAvatarView(avatarEntity *UserAvatarEntity) *view.UserAvatar { + return &view.UserAvatar{ + Id: avatarEntity.Id, + Avatar: avatarEntity.Avatar, + Checksum: avatarEntity.Checksum, + } +} diff --git a/qubership-apihub-service/entity/UserEntity.go b/qubership-apihub-service/entity/UserEntity.go new file mode 100644 index 0000000..4d308a0 --- /dev/null +++ b/qubership-apihub-service/entity/UserEntity.go @@ -0,0 +1,71 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type UserEntity struct { + tableName struct{} `pg:"user_data, alias:user_data"` + + Id string `pg:"user_id, pk, type:varchar"` + Username string `pg:"name, type:varchar"` + Email string `pg:"email, type:varchar"` + AvatarUrl string `pg:"avatar_url, type:varchar"` + Password []byte `pg:"password, type:bytea"` + PrivatePackageId string `pg:"private_package_id, type:varchar"` +} + +func MakeUserView(userEntity *UserEntity) *view.User { + return &view.User{ + Id: userEntity.Id, + Name: userEntity.Username, + Email: userEntity.Email, + AvatarUrl: userEntity.AvatarUrl, + } +} + +func MakeUserV2View(userEntity *UserEntity) *view.User { + return &view.User{ + Id: userEntity.Id, + Name: userEntity.Username, + Email: userEntity.Email, + AvatarUrl: userEntity.AvatarUrl, + } +} + +func MakeExternalUserEntity(userView *view.User, privatePackageId string) *UserEntity { + return &UserEntity{ + Id: userView.Id, + Username: userView.Name, + Email: strings.ToLower(userView.Email), + AvatarUrl: userView.AvatarUrl, + PrivatePackageId: privatePackageId, + } +} + +func MakeInternalUserEntity(internalUser *view.InternalUser, password []byte, privatePackageId string) *UserEntity { + return &UserEntity{ + Id: internalUser.Id, + Username: internalUser.Name, + Email: strings.ToLower(internalUser.Email), + AvatarUrl: "", //todo maybe some hardcoded url for all internal users? 
+ Password: password, + PrivatePackageId: privatePackageId, + } +} diff --git a/qubership-apihub-service/entity/ValidationEntities.go b/qubership-apihub-service/entity/ValidationEntities.go new file mode 100644 index 0000000..2188e2f --- /dev/null +++ b/qubership-apihub-service/entity/ValidationEntities.go @@ -0,0 +1,30 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PublishedVersionValidationEntity struct { + tableName struct{} `pg:"published_version_validation"` + + PackageId string `pg:"package_id, pk, type:varchar"` + Version string `pg:"version, pk, type:varchar"` + Revision int `pg:"revision, pk, type:integer"` + Changelog *view.VersionChangelog `pg:"changelog, type:jsonb"` + Spectral view.VersionSpectral `pg:"spectral, type:jsonb"` + Bwc *view.VersionBwc `pg:"bwc, type:jsonb"` +} diff --git a/qubership-apihub-service/entity/VersionCleanupEntity.go b/qubership-apihub-service/entity/VersionCleanupEntity.go new file mode 100644 index 0000000..fd30bbc --- /dev/null +++ b/qubership-apihub-service/entity/VersionCleanupEntity.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// VersionCleanupEntity tracks one run of the published-versions cleanup job
// in the "versions_cleanup_run" table.
type VersionCleanupEntity struct {
	tableName struct{} `pg:"versions_cleanup_run"`

	// RunId identifies the cleanup run (primary key).
	RunId     string    `pg:"run_id, pk, type:uuid"`
	StartedAt time.Time `pg:"started_at, type:timestamp without time zone"`
	// Status/Details describe the run outcome; exact value set is not
	// visible here — presumably a small enum, confirm against the writer.
	Status    string    `pg:"status, type:varchar"`
	Details   string    `pg:"details, type:varchar"`
	PackageId string    `pg:"package_id, type:varchar"`
	// DeleteBefore is the cutoff: versions older than this were targeted.
	DeleteBefore time.Time `pg:"delete_before, type:timestamp without time zone"`
	DeletedItems int       `pg:"deleted_items, type:integer"`
}
// Error codes and message templates for request validation and
// group/project/package/branch lookups. The $-prefixed tokens in the *Msg
// strings are placeholders substituted when the error is constructed.

const IncorrectParamType = "5"
const IncorrectParamTypeMsg = "$param parameter should be $type"

const InvalidURLEscape = "6"
const InvalidURLEscapeMsg = "Failed to unescape parameter $param"

const InvalidParameter = "7"
const InvalidParameterMsg = "Failed to read parameter $param"

const EmptyParameter = "8"
const EmptyParameterMsg = "Parameter $param should not be empty"

const InvalidParameterValue = "9"
const InvalidParameterValueMsg = "Value '$value' is not allowed for parameter $param"
const InvalidLimitMsg = "Value '$value' is not allowed for parameter limit. Allowed values are in range 1:$maxLimit"

const BadRequestBody = "10"
const BadRequestBodyMsg = "Failed to decode body"

const RequiredParamsMissing = "15"
const RequiredParamsMissingMsg = "Required parameters are missing: $params"

// NOTE(review): "10" is already used by BadRequestBody above — duplicate
// error code. Confirm whether GroupNotFound should have a distinct code;
// not changed here because the codes are client-facing.
const GroupNotFound = "10"
const GroupNotFoundMsg = "Group with id = $id not found"

const AliasAlreadyTaken = "11"
const AliasAlreadyTakenMsg = "Alias $alias is already taken"

const ParentGroupNotFound = "12"
const ParentGroupNotFoundMsg = "Parent group with id = $parentId not found"

const IncorrectDepthForRootGroups = "13"
const IncorrectDepthForRootGroupsMsg = "You can't use depth $depth to search root groups. Allowed values: 0, 1"

const ProjectNotFound = "20"
const ProjectNotFoundMsg = "Project with projectId = $projectId not found"

const ProjectAliasAlreadyExists = "21"
const ProjectAliasAlreadyExistsMsg = "Project with alias = $alias already exists"

const PackageNotFound = "22"
const PackageNotFoundMsg = "Package with packageId = $packageId not found"

const PackageParentIsMissing = "23"
const PackageParentIsMissingMsg = "PackageId cannot be empty for package with kind 'group' or 'package'"

const IncorrectPackageKind = "24"
const IncorrectPackageKindMsg = "Kind '$kind' is not allowed for package"

const BranchDraftNotFound = "30"
const BranchDraftNotFoundMsg = "Draft for project with id $projectId and branch $branch doesn't exist"

const ConfigNotFound = "31"
const ConfigNotFoundMsg = "Config for project with id $projectId and branch $branch doesn't exist"

const InvalidApihubConfig = "33"
const InvalidApihubConfigMsg = "Failed to parse apihub config file"

const NoFilesSent = "35"
const NoFilesSentMsg = "Request has no files in it"

const BranchNotFound = "36"
const BranchNotFoundMsg = "Branch $branch doesn't exist for project $projectId"

const BranchAlreadyExists = "37"
const BranchAlreadyExistsMsg = "Branch $branch already exists for project $projectId"

const ContentIdNotFound = "40"
const ContentIdNotFoundMsg = "Content with id $contentId not found in branch $branch for project $projectId"

const ContentSlugNotFound = "41"
const ContentSlugNotFoundMsg = "Content with slug $contentSlug not found"

const NoContentToDelete = "42"
const NoContentToDeleteMsg = "No content found to delete. Path: $contentId in branch $branch for project $projectId"
// Error codes for file/content operations, API keys, git integrations,
// refs and publish/migration flows. Codes are not strictly sequential
// (e.g. 91-98 appear before 84-90) — order preserved as-is since the
// string values, not the order, are the contract.

const FileNotFound = "43"
const FileNotFoundMsg = "File for path $fileId not found in branch $branch of project $projectGitId"

const DraftFileNotFound = "48"
const DraftFileNotFoundMsg = "File $fileId doesn't exist in draft for project $projectId and branch $branchName"

const FileIdIsTaken = "44"
const FileIdIsTakenMsg = "File with id $fileId already exists"

const IncorrectFilePath = "45"
const IncorrectFilePathMsg = "File path is incorrect: '$path'"

const IncorrectFileName = "46"
const IncorrectFileNameMsg = "File name is incorrect: '$name'"

const FileByRefNotFound = "47"
const FileByRefNotFoundMsg = "File for path $fileId not found by reference $ref in project $projectGitId"

const PublishedPackageVersionNotFound = "49"
const PublishedPackageVersionNotFoundMsg = "Published version $version not found for package $packageId"

const PublishedVersionNotFound = "50"
const PublishedVersionNotFoundMsg = "Published version $version not found"

const SharedIdIsIncorrect = "56"
const SharedIdIsIncorrectMsg = "Shared ID is incorrect: $sharedId"

const GeneratedSharedIdIsNotUnique = "57"
const GeneratedSharedIdIsNotUniqueMsg = "Generated shared ID is not unique"

const NoContentFoundForSharedId = "58"
const NoContentFoundForSharedIdMsg = "Content with SharedId $sharedId not found"

const InsufficientRightsToCommit = "60"
const InsufficientRightsToCommitMsg = "User doesn't have enough privileges to commit in $branch"

const NoTicketInCommit = "62"
const NoTicketInCommitMsg = "Ticket id is required in commit message"

const FileByBlobIdNotFound = "63"
const FileByBlobIdNotFoundMsg = "File content for blobId '$blobId' not found in project $projectGitId"

const UnknownIntegrationType = "80"
const UnknownIntegrationTypeMsg = "Unknown integration type: $type"

const UserIdNotFound = "81"
const UserIdNotFoundMsg = "User id not found in context"

const GitIntegrationConnectFailed = "82"
const GitIntegrationConnectFailedMsg = "Failed to connect to git (type $type) using new api key for user $user"

const ApiKeyNotFound = "83"
const ApiKeyNotFoundMsg = "Api key for user $user and integration $integration not found"

const PackageApiKeyNotFound = "91"
const PackageApiKeyNotFoundMsg = "Api key $apiKeyId for package $packageId not found"

const PackageApiKeyAlreadyRevoked = "92"
const PackageApiKeyAlreadyRevokedMsg = "Api key $apiKeyId for package $packageId is already revoked"

const ApiKeyNotFoundByKey = "94"
const ApiKeyNotFoundByKeyMsg = "Api key not found by provided key"

const ApiKeyHeaderIsEmpty = "96"
const ApiKeyHeaderIsEmptyMsg = "Header api-key is empty"

const ApiKeyNameDuplicate = "97"
const ApiKeyNameDuplicateMsg = "API key with name $name already exists"

const ApiKeyNotFoundById = "98"
const ApiKeyNotFoundByIdMsg = "Api key with id $apiKeyId not found"

const RepositoryIdNotFound = "93"
const RepositoryIdNotFoundMsg = "Repository $repositoryId not found"

const SharedFileIdNotFound = "95"
const SharedFileIdNotFoundMsg = "Shared File Id $sharedFileId is not found"

const ReferencedPackageNotFound = "84"
const ReferencedPackageNotFoundMsg = "Referenced package $package not found"

const ReferencedPackageVersionNotFound = "85"
const ReferencedPackageVersionNotFoundMsg = "Referenced package $package version $version not found"

const ParentGroupIdCantBeModified = "86"
const ParentGroupIdCantBeModifiedMsg = "Parent group id can't be modified"

const AliasCantBeModified = "87"
const AliasCantBeModifiedMsg = "Alias can't be modified"

const ParentIdCantBeModified = "88"
const ParentIdCantBeModifiedMsg = "Parent id can't be modified"

const ServiceNameCantBeModified = "89"
const ServiceNameCantBeModifiedMsg = "Service name can't be modified"

const UnsupportedDiffType = "90"
const UnsupportedDiffTypeMsg = "Type $type is not supported for diff service"

const NotFavored = "100"
const NotFavoredMsg = "$id is not favored by $user"

const AlreadyFavored = "101"
const AlreadyFavoredMsg = "$id is already favored by $user"

const UnsupportedSourceType = "300"
const UnsupportedSourceTypeMsg = "Source type $type is not supported"

const InvalidUrl = "400"
const InvalidUrlMsg = "The file is not available at the URL, authorization may be required. Try to download and upload file directly"

const UrlUnexpectedErr = "401"
const UrlUnexpectedErrMsg = "The file is not available at the URL. Try to download and upload file directly"

const AliasContainsForbiddenChars = "500"
const AliasContainsForbiddenCharsMsg = "Alias contains forbidden chars (not url-safe)"

const RefNotFound = "600"
const RefNotFoundMsg = "Ref $ref for project $projectId, version $version and branch $branch not found"

const RefAlreadyExists = "601"
const RefAlreadyExistsMsg = "Ref $ref for project $projectId, version $version and branch $branch already exists"

const UnsupportedStatus = "602"
const UnsupportedStatusMsg = "Type $status is not supported"

const IntegrationTokenRevoked = "700"
const IntegrationTokenRevokedMsg = "Token for integration $integration was revoked. Try to re-login to re-enable integration."

const GitlabDeadlineExceeded = "701"
const GitlabDeadlineExceededMsg = "Gitlab is currently unavailable. Please try again later."

const IntegrationTokenExpired = "702"
const IntegrationTokenExpiredMsg = "Token for integration $integration is expired. Try to re-login to re-enable integration."

const IntegrationTokenUnexpectedlyExpired = "703"
const IntegrationTokenUnexpectedlyExpiredMsg = "Token unexpectedly expired. Token was successfully renewed. Please retry the request"

const IntegrationTokenAuthFailed = "704"
const IntegrationTokenAuthFailedMsg = "Failed to auth with existing token. Try to re-login to re-enable integration."

const ConnectionNotUpgraded = "800"
const ConnectionNotUpgradedMsg = "Failed to upgrade connection"

const UnsupportedActionWithFile = "901"
const UnsupportedActionWithFileMsg = "Unsupported action (action $code) with file $fileId"

const IncorrectMultipartFile = "1000"
const IncorrectMultipartFileMsg = "Unable to read Multipart file"

const ExternalRefFileMissing = "1001"
const ExternalRefFileMissingMsg = "Missing external ref file $file while resolving $rootFile"

// NOTE(review): "ErrorBuilds:" below looks like a bad merge of "Error:" —
// confirm against the error-formatting caller before changing the template.
const UnserializableFile = "1002"
const UnserializableFileMsg = "Unable to read file $fileId with type $fileType. ErrorBuilds: $error"

const UnexpectedFileType = "1003"
const UnexpectedFileTypeMsg = "Unexpected file type $type of file $fileId"

const ArrayAsRootError = "1005"
const ArrayAsRootErrorMsg = "File $fileId contains array as root object, it's not supported"

const ExternalRefPathMissing = "1010"
const ExternalRefPathMissingMsg = "Error while processing file '$rootFile': external ref path '$path' not found in ref file '$refFile'"

const GitCommitNotFoundForFile = "1020"
const GitCommitNotFoundForFileMsg = "Can't find latest git commit for file '$file'"

const GitBranchConfigContainDuplicateFiles = "1030"
const GitBranchConfigContainDuplicateFilesMsg = "Apihub config('$path') in git is incorrect, please fix it manually. Config contains duplicate file entries: $files"

const IncorrectRefsProvidedForPublish = "1040"
const IncorrectRefsProvidedForPublishMsg = "Incorrect refs provided for publish: $list"

const NotApplicableOperation = "1100"
const NotApplicableOperationMsg = "Operation '$operation' is not applicable for file with status '$status'"

const SharedContentUnavailable = "1200"
const SharedContentUnavailableMsg = "Content for sharedId $sharedId is no longer available because its version was deleted"

const UnableToGenerateInteractiveDoc = "1210"
const UnableToGenerateInteractiveDocMsg = "Unable to generate interactive documentation for $file since it's not a supported specification"

const UnableToSelectWsServer = "1220"
const UnableToSelectWsServerMsg = "Unable to select ws server"

const GroupDocGenerationUnsupported = "1230"
const GroupDocGenerationUnsupportedMsg = "Documentation generation for groups is not supported yet"

const ReleaseVersionDoesntMatchPattern = "1301"
const ReleaseVersionDoesntMatchPatternMsg = "Release version name '$version' doesn't match '$pattern' pattern"

const ServiceNameAlreadyTaken = "1400"
const ServiceNameAlreadyTakenMsg = "Service name $serviceName already taken by package $packageId"

const MigrationVersionIsTooLow = "1500"
const MigrationVersionIsTooLowMsg = "Current DB migration version $currentVersion is not high enough. This operation requires version $requiredVersion or higher"

const MigrationVersionIsDirty = "1501"
const MigrationVersionIsDirtyMsg = "Current DB migration version $currentVersion is dirty. Please fix the migration before running this operation"
// Error codes for package archive validation (16xx), naming/SAML (17xx)
// and role availability (18xx). Placeholders ($file, $error, ...) are
// substituted at error-construction time.

const AgentConfigNotFound = "1600"
const AgentConfigNotFoundMsg = "Agent config for cloud $cloud and namespace $namespace not found"

const InvalidPackagedFile = "1601"
const InvalidPackagedFileMsg = "Package file '$file' has incorrect format: $error"

const InvalidPackageArchive = "1602"
const InvalidPackageArchiveMsg = "Failed to read package archive: $error"

const InvalidPackageArchivedFile = "1603"
const InvalidPackageArchivedFileMsg = "Failed to read $file from package archive: $error"

const PackageArchivedFileNotFound = "1604"
const PackageArchivedFileNotFoundMsg = "File '$file' not found in '$folder' folder in package archive"

const FileMissingFromSources = "1605"
const FileMissingFromSourcesMsg = "File '$fileId' not found in sources archive"

const DocumentMissingFromPackage = "1606"
const DocumentMissingFromPackageMsg = "File '$fileId' is present in build config but not found in documents list"

const ReferenceMissingFromPackage = "1607"
const ReferenceMissingFromPackageMsg = "Reference with refId='$refId', version='$version' is present in build config but not found in refs list"

const PackageForBuildConfigDiscrepancy = "1608"
const PackageForBuildConfigDiscrepancyMsg = "Package value doesn't match expected build config value for '$param' parameter: expected='$expected', actual='$actual'"

const FileDuplicate = "1609" //similar to 1030
const FileDuplicateMsg = "Files with fileIds '$fileIds' have multiple occurrences in '$configName'"

const FileMissing = "1610"
const FileMissingMsg = "Files with fileIds '$fileIds' not found in '$location'"

const FileRedundant = "1611"
const FileRedundantMsg = "Files '$files' found in '$location' but not listed in any configuration"

const IncorrectMetadataField = "1612"
// FIX: message said "Metadata filed" — typo for "field".
const IncorrectMetadataFieldMsg = "Metadata field $field is incorrect: $description"

const NameAlreadyTaken = "1700"
const NameAlreadyTakenMsg = "The name '$name' is already taken in '$directory'"

const PackageAlreadyExists = "1701"
const PackageAlreadyExistsMsg = "Alias '$id' is already reserved. Please use another alias."

const UserAvatarNotFound = "1702"
const UserAvatarNotFoundMsg = "User avatar not found for userid: $userid"

const SamlInstanceIsNull = "1703"
const SamlInstanceIsNullMsg = "Saml instance initialized with error. Error: $error"

const SamlInstanceHasError = "1704"
const SamlInstanceHasErrorMsg = "Saml instance has error $error"

const SamlResponseHaveNoUserId = "1705"
const SamlResponseHaveNoUserIdMsg = "Saml response missing user id"

const SamlResponseHasBrokenContent = "1706"
const SamlResponseHasBrokenContentMsg = "Saml response has broken content for user $userId. Error: $error"

const AssertionIsNull = "1707"
const AssertionIsNullMsg = "Assertion from SAML response is null"

const SamlResponseHasParsingError = "1708"
const SamlResponseHasParsingErrorMsg = "Saml response has error in parsing process. Error: $error"

const SamlResponseMissingEmail = "1709"
const SamlResponseMissingEmailMsg = "Saml response missing user email"

const PackageRedirectExists = "1710"
const PackageRedirectExistsMsg = "Package id '$id' is reserved for redirect(old package id)"

const IncorrectRedirectUrlError = "1711"
const IncorrectRedirectUrlErrorMsg = "Incorrect redirect URL $url"

const UsersNotFound = "1800"
const UsersNotFoundMsg = "Users ($users) do not exist"

const NotAvailableRole = "1801"
const NotAvailableRoleMsg = "Requested role $role is not available. I.e. you don't have permission to set the role."
// Error codes for roles/membership (18xx/40xx), users/packages (20xx/21xx),
// versions and publishing (22xx-30xx), LDAP (26xx), builds (43xx) and
// agents (45xx).

const RoleNotFound = "1802"
const RoleNotFoundMsg = "Role $role doesn't exist"

const RoleCannotBeDeleted = "1803"
const RoleCannotBeDeletedMsg = "You can't delete role $role for $user because its inherited from $package"

const UserWithNoRoles = "1804"
const UserWithNoRolesMsg = "User $user doesn't have any roles for package $packageId"

const OwnRoleNotEditable = "1805"
const OwnRoleNotEditableMsg = "You cannot edit your own role"

const ArchiveSizeExceeded = "1806"
const ArchiveSizeExceededMsg = "Archive size exceeded. Archive size limit - $size"

const PublishFileSizeExceeded = "1807"
const PublishFileSizeExceededMsg = "File size exceeded. File size limit - $size"

const BranchContentSizeExceeded = "1808"
const BranchContentSizeExceededMsg = "Branch content size exceeded. Branch content size limit - $size"

const RoleNotAllowed = "1809"
const RoleNotAllowedMsg = "User(s) with role $role cannot be added to the package"

const InsufficientPrivileges = "1900"
const InsufficientPrivilegesMsg = "You don't have enough privileges to perform this operation"

const EmailAlreadyTaken = "2000"
const EmailAlreadyTakenMsg = "User with email '$email' already exists"

const PasswordTooLong = "2001"
const PasswordTooLongMsg = "Password length exceeds 72 bytes"

const UserNotFound = "2100"
const UserNotFoundMsg = "User with userId = $userId not found"

const PackageDoesntExists = "2101"
// FIX: message said "doesn't exists".
const PackageDoesntExistsMsg = "Package with '$id' doesn't exist"

const PackageAlreadyTaken = "2102"
const PackageAlreadyTakenMsg = "Package $packageId is already in use by project $projectId"

const PackageKindIsNotAllowed = "2103"
const PackageKindIsNotAllowedMsg = "Package '$packageId' with kind - '$kind' is not allowed for project integration"

const DefaultReleaseVersionIsNotReleased = "2200"
// FIX: stray space inside the quotes ("'$version '").
const DefaultReleaseVersionIsNotReleasedMsg = "Default release version - '$version' isn't in release status"

const DefaultReleaseVersionHasNotLatestRevision = "2201"
// FIX: stray space inside the quotes ("'$version '").
const DefaultReleaseVersionHasNotLatestRevisionMsg = "Default release version - '$version' has not latest revision"

const OperationNotFound = "2301"
const OperationNotFoundMsg = "Operation $operationId not found in published version $version for package $packageId"

const PreviousVersionNotFound = "2400"
const PreviousVersionNotFoundMsg = "Previous version '$previousVersion' for version '$version' doesn't exist"

const NoPreviousVersion = "2401"
const NoPreviousVersionMsg = "Version '$version' doesn't have a previous version"

const InvalidRevisionFormat = "2500"
const InvalidRevisionFormatMsg = "Version '$version' has invalid revision format"

const PackageIdMismatch = "2501"
const PackageIdMismatchMsg = "PackageId from config $configPackageId doesn't match packageId $packageId from path"

const EmptyDataForPublish = "2502"
const EmptyDataForPublishMsg = "Publish cannot be started without reference and documents"

const VersionNameNotAllowed = "2503"
const VersionNameNotAllowedMsg = "Version name '$version' contains restricted characters ('$character')"

const InvalidPreviousVersionPackage = "2504"
const InvalidPreviousVersionPackageMsg = "Previous version packageId $previousVersionPackageId is same as packageId $packageId"

const PreviousVersionNameNotAllowed = "2505"
const PreviousVersionNameNotAllowedMsg = "Previous Version '$version' contains restricted characters ('@')"

const InvalidSearchParameters = "2600"
const InvalidSearchParametersMsg = "Incorrect search parameters: $error"

const LdapConnectionIsNotCorrect = "2601"
const LdapConnectionIsNotCorrectMsg = "Ldap connection isn't correct. Ldap server - $server. Error - $error"

const LdapConnectionIsNotAllowed = "2602"
const LdapConnectionIsNotAllowedMsg = "Ldap bind connection isn't allowed. Ldap server - $server. Error - $error"

const LdapSearchFailed = "2603"
const LdapSearchFailedMsg = "Ldap search failed. Ldap server - $server. Error - $error"

const PreviousVersionFromRequestIsEmpty = "3001"
const PreviousVersionFromRequestIsEmptyMsg = "Previous version from request is empty"

const InvalidReleaseVersionPatternFormat = "2604"
const InvalidReleaseVersionPatternFormatMsg = "Release Version Pattern '$pattern' has invalid pattern format"

const BuildNotFoundByQuery = "2610"
const BuildNotFoundByQueryMsg = "Build config not found by $query"

const BuildNotFoundById = "2611"
const BuildNotFoundByIdMsg = "Build with $id not found"

const UnsupportedMemberUpdateAction = "4000"
// FIX: message said "is is not supported" (doubled word).
const UnsupportedMemberUpdateActionMsg = "Action $action is not supported"

const InvalidRolePermission = "4001"
const InvalidRolePermissionMsg = "Permission $permission is invalid"

const RoleAlreadyExists = "4002"
const RoleAlreadyExistsMsg = "Role with id=$roleId already exists"

const RoleNotEditable = "4003"
const RoleNotEditableMsg = "Role '$roleId' cannot be edited"

const NotEnoughPermissionsForRole = "4004"
const NotEnoughPermissionsForRoleMsg = "You don't have enough permissions to manage '$roleId' role"

const RoleDoesntExist = "4005"
const RoleDoesntExistMsg = "Role '$roleId' does not exist"

const MemberRoleNotFound = "4006"
const MemberRoleNotFoundMsg = "User '$userId' doesn't have '$roleId' role for $packageId"

const AllRolesRequired = "4007"
const AllRolesRequiredMsg = "All existing roles are required"

const SysadmNotFound = "4008"
const SysadmNotFoundMsg = "System administrator with userId = $userId not found"

const RoleNameDoesntMatchPattern = "4009"
const RoleNameDoesntMatchPatternMsg = "Role name '$role' doesn't match '$pattern' pattern"

const UnableToChangeOldRevision = "4201"
const UnableToChangeOldRevisionMsg = "Unable to change old revision. You can update only the latest one."

const InvalidCompareVersionReq = "4019"
const InvalidCompareVersionReqMsg = "Compare version req '$compareVersionReq' has incorrect format: $error"

const InvalidDocumentType = "4024"
const InvalidDocumentTypeMsg = "Unexpected document type '$type'"

const InvalidDocumentFormat = "4025"
const InvalidDocumentFormatMsg = "Unexpected document format - '$format'"

const BuildNotOwned = "4300"
const BuildNotOwnedMsg = "You cannot use build '$buildId' since you are not its owner"

const BuildNotFound = "4301"
const BuildNotFoundMsg = "Build '$buildId' doesn't exist"

const BuildAlreadyFinished = "4302"
const BuildAlreadyFinishedMsg = "Build '$buildId' already finished"

const ForbiddenDefaultMigrationBuildParameters = "4401"
const ForbiddenDefaultMigrationBuildParametersMsg = "Config contains forbidden migration build parameters - '$parameters'"

const ChangesAreNotEmpty = "4402"
const ChangesAreNotEmptyMsg = "Changes are not empty when noChangelog is true"

const AgentNotFound = "4500"
const AgentNotFoundMsg = "Agent '$agentId' not found"

const InvalidAgentUrl = "4501"
const InvalidAgentUrlMsg = "Agent url '$url' for agent '$agentId' is not valid"

const InactiveAgent = "4502"
const InactiveAgentMsg = "Agent '$agentId' is not active"

const ProxyFailed = "4503"
const ProxyFailedMsg = "Failed to proxy the request to $url"

const IncompatibleAgentVersion = "4504"
const IncompatibleAgentVersionMsg = "Current version $version of Agent not supported by APIHUB. Please, update this instance."
+ +const ChangesAreEmpty = "4600" +const ChangesAreEmptyMsg = "Changes are empty" + +const UnableToChangeExcludeFromSearch = "4700" +const UnableToChangeExcludeFromSearchMsg = "This package cannot be included in global search as parent group/workspace is excluded" + +const UnableToGetMigrationDataCleanupResult = "4800" +const UnableToGetMigrationDataCleanupResultMsg = "Cleanup data for specified id not found" + +const BuildSourcesNotFound = "4900" +const BuildSourcesNotFoundMsg = "Build sources for '$publishId' build not found" + +const SourcesNotFound = "4901" +const SourcesNotFoundMsg = "Sources archive not found for package '$packageId' and version '$versionName'" + +const PublishedSourcesDataNotFound = "4902" +const PublishedSourcesDataNotFoundMsg = "Published version source data not found for package '$packageId' and version '$versionName'" + +const InvalidComparisonField = "4810" +const InvalidComparisonFieldMsg = "Comparison field '$field' is not valid ($error)" + +const ComparisonNotFound = "4811" +const ComparisonNotFoundMsg = "Comparison for versions pair not found (comparisonId=$comparisonId) (packageId:$packageId - version:$version - revision:$revision vs previousPackageId:$previousPackageId - previousVersion:$previousVersion - previousRevision:$previousRevision)" + +const PublishedVersionRevisionNotFound = "4812" +const PublishedVersionRevisionNotFoundMsg = "Published version $version with revision $revision not found for package $packageId" + +const DuplicateReference = "4813" +const DuplicateReferenceMsg = "Duplicate references are not allowed (refId = $refId, refVersion = $refVersion)" + +const MultiplePackageReference = "4814" +const MultiplePackageReferenceMsg = "Multiple references for the same package are not allowed (refId = $refId)" + +const ExcludedComparisonReference = "4815" +const ExcludedComparisonReferenceMsg = "Excluded reference found in comparison list (refId = $refId, version = $version, revision = $revision)" + +const 
InvalidGroupingPrefix = "5000" +const InvalidGroupingPrefixMsg = "Grouping prefix has invalid format ($error)" + +const DefaultVersionNotFound = "5001" +const DefaultVersionNotFoundMsg = "Package $packageId doesn't have a default version" + +const OperationsAreEmpty = "5200" +const OperationsAreEmptyMsg = "Operations are empty" + +const UnsupportedFormat = "5100" +const UnsupportedFormatMsg = "Format $format is not supported" + +const InvalidURL = "5300" +const InvalidURLMsg = "Url '$url' is not a valid url" + +const VersionIsEqualToPreviousVersion = "5400" +const VersionIsEqualToPreviousVersionMsg = "Version '$version' cannot be the same as previous version '$previousVersion'" + +const InvalidGraphQLOperationType = "5500" +const InvalidGraphQLOperationTypeMsg = "Unexpected graphQL operation type '$type'" + +const InvalidProtobufOperationType = "5501" +const InvalidProtobufOperationTypeMsg = "Unexpected protobuf operation type '$type'" + +const UserByEmailNotFound = "6000" +const UserByEmailNotFoundMsg = "User with email = '$email' not found" + +const OperationGroupAlreadyExists = "6010" +const OperationGroupAlreadyExistsMsg = "Operation group with groupName=$groupName already exists" + +const OperationGroupNotFound = "6011" +const OperationGroupNotFoundMsg = "Operation group with groupName=$groupName doesn't exist" + +const OperationGroupNotModifiable = "6012" +const OperationGroupNotModifiableMsg = "You can only modify 'template' and 'description' parameters for autogenerated operation group '$groupName'" + +const OverlappingQueryParameter = "6013" +const OverlappingQueryParameterMsg = "Query parameter '$param2' cannot be used in addition to '$param1' parameter" + +const GroupingVersionNotAllowed = "6014" +const GroupingVersionNotAllowedMsg = "Cannot add operation from package '$packageId' version '$version' since it's not referenced in current package version" + +const GroupOperationsLimitExceeded = "6015" +const GroupOperationsLimitExceededMsg = "Operations 
limit per group ($limit) exceeded" + +const EmptyOperationGroupName = "6016" +const EmptyOperationGroupNameMsg = "Operation group name cannot be empty" + +const OperationGroupExportTemplateNotFound = "6017" +const OperationGroupExportTemplateNotFoundMsg = "Export template not found for operation group '$groupName'" + +const UnsupportedQueryParam = "6100" +const UnsupportedQueryParamMsg = "'$param' query param is supported only for dashboards" + +const UnableToDeleteOldRevision = "6205" +const UnableToDeleteOldRevisionMsg = "Unable to delete old revision." + +const FromPackageNotFound = "6301" +const FromPackageNotFoundMsg = "Unable to perform package move operation since 'from' package $packageId not found" + +const ToParentPackageNotFound = "6302" +const ToParentPackageNotFoundMsg = "Unable to perform package move operation since 'to' parent package $packageId not found" + +const TransitionActivityNotFound = "6303" +const TransitionActivityNotFoundMsg = "Transition activity $id not found" + +const ToPackageExists = "6304" +const ToPackageExistsMsg = "Unable to perform package move operation since 'to' package $packageId already exists" + +const ToPackageRedirectExists = "6305" +const ToPackageRedirectExistsMsg = "Unable to perform package move operation since 'to' package $packageId is already used by 'old' package id which was moved to $newPackageId. " + + "Add `\"overwriteHistory\": true` parameter if you want to do force move. In this case transition record will be lost and there would be no redirect from $packageId to $newPackageId." 
+ +const SinglePrivatePackageAllowed = "6400" +const SinglePrivatePackageAllowedMsg = "Only one private package allowed for user" + +const PrivateWorkspaceIdAlreadyTaken = "6401" +const PrivateWorkspaceIdAlreadyTakenMsg = "Id '$id' cannot be used for private workspace since it's already used by another user or package" + +const PrivateWorkspaceIdDoesntExist = "6402" +const PrivateWorkspaceIdDoesntExistMsg = "User '$userId' doesn't have a private workspace" + +const PrivateWorkspaceNotModifiableMsg = "Only sysadmin can modify private workspaces" + +const OperationModelNotFound = "6410" +const OperationModelNotFoundMsg = "Model '$modelName' doesn't exist for operation '$operationId'" + +const InvalidDocumentTransformationReq = "6217" +const InvalidDocumentTransformationReqMsg = "Document transformation req 'documentTransformationReq' has incorrect format: $error" + +const UnknownBuildType = "6270" +const UnknownBuildTypeMsg = "Unknown build type: $type" + +const TransformedDocumentsNotFound = "6280" +const TransformedDocumentsNotFoundMsg = "Transformed documents not found. Package id - '$packageId', version - '$version', apiType - '$apiType', groupName = '$groupName'" + +const UnknownResponseFormat = "6290" +const UnknownResponseFormatMsg = "Unknown response format: $format" + +const InvalidTextFilterFormatForOperationCustomTag = "6411" +const InvalidTextFilterFormatForOperationCustomTagMsg = "Invalid textFilter format for search by operation custom tag. 
textFilter value - '$textFilter'" + +const PackageVersionCannotBeCopied = "6420" +const PackageVersionCannotBeCopiedMsg = "Version '$version' from package '$packageId' cannot be copied to '$targetPackageId' package: $error" + +const FormatNotSupportedForBuildType = "6500" +const FormatNotSupportedForBuildTypeMsg = "Format '$format' is not supported for '$buildType' buildType" + +const InvalidGroupExportTemplateType = "6501" +const InvalidGroupExportTemplateTypeMsg = "Template field should only contain a file or an empty string" + +const InvalidMultipartFileType = "6502" +const InvalidMultipartFileTypeMsg = "'$field' field should only contain a file or an empty string" + +const GitIntegrationUnsupportedHookEventType = "6600" +const GitIntegrationUnsupportedHookEventTypeMsg = "Event type '$type' is not supported" + +const AliasContainsRunenvChars = "6601" +const AliasContainsRunenvCharsMsg = "The alias 'RUNENV' is reserved for internal use. Please use another alias" + +const GitVersionPublishFileNotFound = "6610" +const GitVersionPublishFileNotFoundMsg = "Version publish file for project with id $projectId and branch $branch doesn't exist" + +const GitVersionPublishFileInvalid = "6611" +const GitVersionPublishFileInvalidMsg = "Version publish file for project with id $projectId and branch $branch is invalid" + +const PublishProcessNotFound = "6700" +const PublishProcessNotFoundMsg = "Publish process with publishId=$publishId not found" + +const UnsupportedApiType = "6710" +const UnsupportedApiTypeMsg = "Api type $apiType is not supported for this operation" + +const EmptyCSVFile = "6800" +const EmptyCSVFileMsg = "CSV file is empty" + +const InvalidCSVFile = "6801" +const InvalidCSVFileMsg = "CSV file has invalid format: $error" + +const InvalidPackageKind = "6802" +const InvalidPackageKindMsg = `Action is not allowed for package with kind="$kind", allowed kind - "$allowedKind"` + +const HostNotAllowed = "6900" +const HostNotAllowedMsg = "Host not allowed: $host" \ No 
newline at end of file diff --git a/qubership-apihub-service/exception/Errors.go b/qubership-apihub-service/exception/Errors.go new file mode 100644 index 0000000..559f0d9 --- /dev/null +++ b/qubership-apihub-service/exception/Errors.go @@ -0,0 +1,73 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exception + +import ( + "fmt" + "strings" +) + +type CustomError struct { + Status int `json:"status"` + Code string `json:"code,omitempty"` + Message string `json:"message,omitempty"` + Params map[string]interface{} `json:"params,omitempty"` + Debug string `json:"debug,omitempty"` +} + +func (c CustomError) Error() string { + msg := c.Message + for k, v := range c.Params { + //todo make smart replace (e.g. 
now it replaces $projectId if we have $project in params) + msg = strings.ReplaceAll(msg, "$"+k, fmt.Sprintf("%v", v)) + } + if c.Debug != "" { + return msg + " | " + c.Debug + } else { + return msg + } +} + +// todo replace with CustomError +type NotFoundError struct { + Id string + Name string + Message string +} + +func (g NotFoundError) Error() string { + if g.Message != "" { + return g.Message + } + if g.Id != "" { + return fmt.Sprintf("entity with id = %s not found", g.Id) + } else { + return fmt.Sprintf("entity with name = %s not found", g.Name) + } +} + +// todo replace with CustomError +type ContentNotFoundError struct { + ContentId string + Slug string +} + +func (c ContentNotFoundError) Error() string { + if c.ContentId != "" { + return fmt.Sprintf("content with contentId = %v not found", c.ContentId) + } else { + return fmt.Sprintf("content with slug = %v not found", c.Slug) + } +} diff --git a/qubership-apihub-service/go.mod b/qubership-apihub-service/go.mod new file mode 100644 index 0000000..f14287a --- /dev/null +++ b/qubership-apihub-service/go.mod @@ -0,0 +1,169 @@ +module github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service + +go 1.22 + +require ( + github.com/buraksezer/olric v0.4.7 + github.com/buraksezer/olric-cloud-plugin v0.3.0-beta.4 + github.com/crewjam/saml v0.4.12 + github.com/go-ldap/ldap v3.0.3+incompatible + github.com/go-pg/pg/v10 v10.10.6 + github.com/go-playground/validator/v10 v10.10.0 + github.com/go-resty/resty/v2 v2.7.0 + github.com/google/uuid v1.3.0 + github.com/gorilla/handlers v1.4.2 + github.com/gorilla/mux v1.7.4 + github.com/gorilla/websocket v1.5.0 + github.com/gosimple/slug v1.12.0 + github.com/iancoleman/orderedmap v0.3.0 + github.com/robfig/cron/v3 v3.0.1 + github.com/russellhaering/goxmldsig v1.2.0 + github.com/shaj13/go-guardian/v2 v2.11.3 + github.com/shaj13/libcache v1.0.0 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.8.1 + github.com/x-cray/logrus-prefixed-formatter 
v0.5.2 + github.com/xanzy/go-gitlab v0.53.0 + github.com/xuri/excelize/v2 v2.7.1 + golang.org/x/crypto v0.14.0 + golang.org/x/sync v0.1.0 + gopkg.in/natefinch/lumberjack.v2 v2.0.0 +) + +require ( + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.1.1 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/klauspost/compress v1.16.7 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/minio/sha256-simd v1.0.1 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.9.1 // indirect + github.com/prometheus/procfs v0.0.8 // indirect + github.com/rs/xid v1.5.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + sigs.k8s.io/yaml v1.2.0 // indirect +) + +require ( + cloud.google.com/go v0.88.0 // indirect + github.com/Azure/azure-sdk-for-go v46.4.0+incompatible // indirect + github.com/Azure/go-autorest v14.2.0+incompatible // indirect + github.com/Azure/go-autorest/autorest v0.11.7 // indirect + github.com/Azure/go-autorest/autorest/adal v0.9.14 // indirect + github.com/Azure/go-autorest/autorest/azure/auth v0.5.2 // indirect + github.com/Azure/go-autorest/autorest/azure/cli v0.4.1 // indirect + github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect + github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect + github.com/Azure/go-autorest/autorest/validation v0.3.0 // indirect + github.com/Azure/go-autorest/logger v0.2.1 // indirect + github.com/Azure/go-autorest/tracing v0.6.0 // indirect + github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da // indirect + github.com/aws/aws-sdk-go v1.35.1 // indirect + github.com/beevik/etree v1.1.0 // indirect + github.com/buraksezer/connpool v0.6.0 // indirect + github.com/buraksezer/consistent v0.0.0-20191006190839-693edf70fd72 // indirect + 
github.com/cespare/xxhash v1.1.0 // indirect + github.com/crewjam/httperr v0.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/denverdino/aliyungo v0.0.0-20200929080117-4fc2b424761a // indirect + github.com/digitalocean/godo v1.45.0 // indirect + github.com/dimchansky/utfbom v1.1.0 // indirect + github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect + github.com/go-logr/logr v0.2.0 // indirect + github.com/go-pg/zerochecker v0.2.0 // indirect + github.com/go-playground/locales v0.14.0 // indirect + github.com/go-playground/universal-translator v0.18.0 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-jwt/jwt/v4 v4.4.3 // indirect + github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/google/btree v1.0.0 // indirect + github.com/google/go-querystring v1.0.0 // indirect + github.com/google/gofuzz v1.1.0 // indirect + github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/googleapis/gnostic v0.4.1 // indirect + github.com/gophercloud/gophercloud v0.13.0 // indirect + github.com/gosimple/unidecode v1.0.1 // indirect + github.com/hashicorp/errwrap v1.0.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.1 // indirect + github.com/hashicorp/go-discover v0.0.0-20200812215701-c4b85f6ed31f // indirect + github.com/hashicorp/go-immutable-radix v1.0.0 // indirect + github.com/hashicorp/go-msgpack v0.5.3 // indirect + github.com/hashicorp/go-multierror v1.1.0 // indirect + github.com/hashicorp/go-retryablehttp v0.6.8 // indirect + github.com/hashicorp/go-sockaddr v1.0.2 // indirect + github.com/hashicorp/go-uuid v1.0.2 // indirect + github.com/hashicorp/golang-lru v0.5.1 // indirect + github.com/hashicorp/logutils v1.0.0 // indirect + github.com/hashicorp/mdns v1.0.3 // indirect + github.com/hashicorp/memberlist v0.1.5 // indirect + github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443 // indirect + 
github.com/imdario/mergo v0.3.12 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jonboulle/clockwork v0.2.2 // indirect + github.com/joyent/triton-go v1.8.5 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/linode/linodego v0.21.1 // indirect + github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect + github.com/mattn/go-colorable v0.1.6 // indirect + github.com/mattn/go-isatty v0.0.12 // indirect + github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect + github.com/miekg/dns v1.1.31 // indirect + github.com/minio/minio-go/v7 v7.0.65 + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/mapstructure v1.2.2 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/nicolai86/scaleway-sdk v1.10.2-0.20180628010248-798f60e20bb2 // indirect + github.com/packethost/packngo v0.3.0 // indirect + github.com/pkg/errors v0.9.1 + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.5.1 + github.com/richardlehane/mscfb v1.0.4 // indirect + github.com/richardlehane/msoleps v1.0.3 // indirect + github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect + github.com/softlayer/softlayer-go v1.0.1 // indirect + github.com/softlayer/xmlrpc v0.0.0-20200409220501-5f089df7cb7e // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/tencentcloud/tencentcloud-sdk-go v3.0.233+incompatible // indirect + github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect + github.com/vmihailenco/bufpool v0.1.11 // indirect + github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect + github.com/vmihailenco/msgpack/v5 v5.3.4 // indirect + 
github.com/vmihailenco/tagparser v0.1.2 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/vmware/govmomi v0.23.1 // indirect + github.com/xuri/efp v0.0.0-20220603152613-6918739fd470 // indirect + github.com/xuri/nfp v0.0.0-20220409054826-5e722a1d9e22 // indirect + go.opencensus.io v0.23.0 // indirect + golang.org/x/net v0.17.0 + golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect + golang.org/x/sys v0.13.0 // indirect + golang.org/x/term v0.13.0 // indirect + golang.org/x/text v0.13.0 // indirect + golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e + google.golang.org/api v0.51.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20211013025323-ce878158c4d4 // indirect + google.golang.org/grpc v1.41.0 // indirect + google.golang.org/protobuf v1.27.1 // indirect + gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d // indirect + gopkg.in/inf.v0 v0.9.1 // indirect + gopkg.in/resty.v1 v1.12.0 + gopkg.in/square/go-jose.v2 v2.5.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + k8s.io/api v0.20.6 // indirect + k8s.io/apimachinery v0.20.6 // indirect + k8s.io/client-go v0.20.6 // indirect + k8s.io/klog/v2 v2.4.0 // indirect + k8s.io/utils v0.0.0-20201110183641-67b214c5f920 // indirect + mellium.im/sasl v0.2.1 // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.0.3 // indirect +) diff --git a/qubership-apihub-service/metrics/BusinessMetrics.go b/qubership-apihub-service/metrics/BusinessMetrics.go new file mode 100644 index 0000000..0bf85ab --- /dev/null +++ b/qubership-apihub-service/metrics/BusinessMetrics.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package metrics + +const ComparisonsCalled = "comparisons_called" +const ExportsCalled = "exports_called" +const DeprecatedOperationsCalled = "deprecated_operations_called" +const DocumentsCalled = "documents_called" +const PackagesAndDashboardsCreated = "packages_and_dashboards_created" +const ReleaseVersionsPublished = "release_versions_published" diff --git a/qubership-apihub-service/metrics/Metrics.go b/qubership-apihub-service/metrics/Metrics.go new file mode 100644 index 0000000..cd28ab5 --- /dev/null +++ b/qubership-apihub-service/metrics/Metrics.go @@ -0,0 +1,123 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package metrics + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +var WSBranchEditSessionCount = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_ws_branch_edit_session_count", + Help: "ws branch edit sessions count.", + }, + []string{}, +) + +var WSFileEditSessionCount = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_ws_file_edit_session_count", + Help: "ws file edit sessions count.", + }, + []string{}, +) + +var TotalRequests = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "apihub_http_requests_total", + Help: "Number of get requests.", + }, + []string{"path", "code", "method"}, +) + +var HttpDuration = promauto.NewHistogramVec( + prometheus.HistogramOpts{ + Name: "apihub_http_request_duration_seconds_historgram", + Buckets: []float64{ + 0.1, // 100 ms + 0.2, + 0.25, + 0.5, + 1, + 1.5, + 3, + 5, + 10, + }, + }, + []string{"path", "code", "method"}, +) + +var BuildNoneStatusQueueSize = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_build_none_queue_size", + Help: "Build count with status = 'none'", + }, + []string{}, +) + +var BuildRunningStatusQueueSize = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_build_running_queue_size", + Help: "Build count with status = 'running'", + }, + []string{}, +) + +var FailedBuildCount = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_failed_build_count", + Help: "Build count with status = 'error'", + }, + []string{}, +) + +var MaxBuildTime = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_max_build_time", + Help: "Max build time", + }, + []string{}, +) + +var AvgBuildTime = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_avg_build_time", + Help: "Avg build time", + }, + []string{}, +) + +var NumberOfBuildRetries = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "apihub_build_retries_count", + Help: "Number of build 
retries", + }, + []string{}, +) + +func RegisterAllPrometheusApplicationMetrics() { + prometheus.Register(TotalRequests) + prometheus.Register(HttpDuration) + prometheus.Register(WSBranchEditSessionCount) + prometheus.Register(WSFileEditSessionCount) + prometheus.Register(BuildRunningStatusQueueSize) + prometheus.Register(BuildNoneStatusQueueSize) + prometheus.Register(FailedBuildCount) + prometheus.Register(MaxBuildTime) + prometheus.Register(AvgBuildTime) + prometheus.Register(NumberOfBuildRetries) +} diff --git a/qubership-apihub-service/middleware/PrometheusMiddleware.go b/qubership-apihub-service/middleware/PrometheusMiddleware.go new file mode 100644 index 0000000..e4d8015 --- /dev/null +++ b/qubership-apihub-service/middleware/PrometheusMiddleware.go @@ -0,0 +1,60 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package midldleware + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/gorilla/mux" + "net/http" + "strconv" + "strings" + "time" +) + +type loggingResponseWriter struct { + http.ResponseWriter + statusCode int +} + +func newLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter { + return &loggingResponseWriter{w, http.StatusOK} +} + +func (lrw *loggingResponseWriter) WriteHeader(code int) { + lrw.statusCode = code + lrw.ResponseWriter.WriteHeader(code) +} + +func PrometheusMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + route := mux.CurrentRoute(r) + path, _ := route.GetPathTemplate() + statusCode := 200 + now := time.Now() + + if strings.Contains(path, "/ws/") { + next.ServeHTTP(w, r) + } else { + lrw := newLoggingResponseWriter(w) + next.ServeHTTP(lrw, r) + statusCode = lrw.statusCode + } + + elapsedSeconds := time.Since(now).Seconds() + + metrics.TotalRequests.WithLabelValues(path, strconv.Itoa(statusCode), r.Method).Inc() + metrics.HttpDuration.WithLabelValues(path, strconv.Itoa(statusCode), r.Method).Observe(elapsedSeconds) + }) +} diff --git a/qubership-apihub-service/migration/controller/OperationsMigrationController.go b/qubership-apihub-service/migration/controller/OperationsMigrationController.go new file mode 100644 index 0000000..80e010f --- /dev/null +++ b/qubership-apihub-service/migration/controller/OperationsMigrationController.go @@ -0,0 +1,243 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package controller + +import ( + "encoding/json" + "io/ioutil" + "net/http" + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/google/uuid" + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" +) + +type OperationsMigrationController interface { + StartOpsMigration(w http.ResponseWriter, r *http.Request) + GetMigrationReport(w http.ResponseWriter, r *http.Request) + CancelRunningMigrations(w http.ResponseWriter, r *http.Request) + GetSuspiciousBuilds(w http.ResponseWriter, r *http.Request) +} + +func NewTempMigrationController(migrationService service.DBMigrationService, isSysadmFunc func(context.SecurityContext) bool) OperationsMigrationController { + return &operationsMigrationControllerImpl{ + migrationService: migrationService, + isSysadm: isSysadmFunc, + } +} + +type operationsMigrationControllerImpl struct { + migrationService service.DBMigrationService + isSysadm func(context.SecurityContext) bool +} + +func (t operationsMigrationControllerImpl) StartOpsMigration(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + 
sufficientPrivileges := t.isSysadm(ctx) + if !sufficientPrivileges { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + defer r.Body.Close() + body, err := ioutil.ReadAll(r.Body) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + var req view.MigrationRequest + + err = json.Unmarshal(body, &req) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BadRequestBody, + Message: exception.BadRequestBodyMsg, + Debug: err.Error(), + }) + return + } + + id := uuid.New().String() + + utils.SafeAsync(func() { + err := t.migrationService.MigrateOperations(id, req) + if err != nil { + log.Errorf("Operations migration process failed: %s", err) + } else { + log.Infof("Operations migration process complete") + } + }) + + result := map[string]interface{}{} + result["id"] = id + + controller.RespondWithJson(w, http.StatusCreated, result) +} + +func (t operationsMigrationControllerImpl) GetMigrationReport(w http.ResponseWriter, r *http.Request) { + var err error + ctx := context.Create(r) + sufficientPrivileges := t.isSysadm(ctx) + if !sufficientPrivileges { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + params := mux.Vars(r) + migrationId := params["migrationId"] + + includeBuildSamples := false + if r.URL.Query().Get("includeBuildSamples") != "" { + includeBuildSamples, err = strconv.ParseBool(r.URL.Query().Get("includeBuildSamples")) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + 
Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "includeBuildSamples", "type": "boolean"}, + Debug: err.Error(), + }) + return + } + } + report, err := t.migrationService.GetMigrationReport(migrationId, includeBuildSamples) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: "999", + Message: "Failed to get migration result", + Debug: err.Error(), + }) + return + } + if report == nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Code: "998", + Message: "Migration not found", + }) + return + } + + controller.RespondWithJson(w, http.StatusOK, report) +} + +func (t operationsMigrationControllerImpl) CancelRunningMigrations(w http.ResponseWriter, r *http.Request) { + ctx := context.Create(r) + sufficientPrivileges := t.isSysadm(ctx) + if !sufficientPrivileges { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + err := t.migrationService.CancelRunningMigrations() + if err != nil { + controller.RespondWithError(w, "Failed to cancel running migrations", err) + return + } + w.WriteHeader(http.StatusOK) +} + +func (t operationsMigrationControllerImpl) GetSuspiciousBuilds(w http.ResponseWriter, r *http.Request) { + var err error + ctx := context.Create(r) + sufficientPrivileges := t.isSysadm(ctx) + if !sufficientPrivileges { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + }) + return + } + + params := mux.Vars(r) + migrationId := params["migrationId"] + + limit := 100 + maxLimit := 5000 + if r.URL.Query().Get("limit") != "" { + limit, err = 
strconv.Atoi(r.URL.Query().Get("limit")) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "limit", "type": "int"}, + Debug: err.Error(), + }) + return + } + if limit < 1 || limit > maxLimit { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidLimitMsg, + Params: map[string]interface{}{"value": limit, "maxLimit": maxLimit}, + }) + return + } + } + page := 0 + if r.URL.Query().Get("page") != "" { + page, err = strconv.Atoi(r.URL.Query().Get("page")) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectParamType, + Message: exception.IncorrectParamTypeMsg, + Params: map[string]interface{}{"param": "page", "type": "int"}, + Debug: err.Error()}) + return + } + } + changedField := r.URL.Query().Get("changedField") + + suspiciousBuilds, err := t.migrationService.GetSuspiciousBuilds(migrationId, changedField, limit, page) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to get migration result", + Debug: err.Error(), + }) + return + } + + controller.RespondWithJson(w, http.StatusOK, suspiciousBuilds) +} diff --git a/qubership-apihub-service/migration/entity/MigrationEntity.go b/qubership-apihub-service/migration/entity/MigrationEntity.go new file mode 100644 index 0000000..362ae12 --- /dev/null +++ b/qubership-apihub-service/migration/entity/MigrationEntity.go @@ -0,0 +1,123 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package entity + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/view" +) + +type PublishedContentMigrationEntity struct { + tableName struct{} `pg:"published_version_revision_content, alias:published_version_revision_content"` + + entity.PublishedContentEntity + Data []byte `pg:"data, type:bytea"` +} + +type MigrationRunEntity struct { + tableName struct{} `pg:"migration_run"` + + Id string `pg:"id, type:varchar"` + StartedAt time.Time `pg:"started_at, type:timestamp without time zone"` + Status string `pg:"status, type:varchar"` + Stage string `pg:"stage, type:varchar"` + PackageIds []string `pg:"package_ids, type:varchar[]"` + Versions []string `pg:"versions, type:varchar[]"` + IsRebuild bool `pg:"is_rebuild, type:boolean"` + IsRebuildChangelogOnly bool `pg:"is_rebuild_changelog_only, type:boolean"` + SkipValidation bool `pg:"skip_validation, type:boolean"` + CurrentBuilderVersion string `pg:"current_builder_version, type:varchar"` + ErrorDetails string `pg:"error_details, type:varchar"` + FinishedAt time.Time `pg:"finished_at, type:timestamp without time zone"` + UpdatedAt time.Time `pg:"updated_at, type:timestamp without time zone"` +} + +type MigratedVersionEntity struct { + tableName struct{} `pg:"migrated_version, alias:migrated_version"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + Error string 
`pg:"error, type:varchar"` + BuildId string `pg:"build_id, type:varchar"` + MigrationId string `pg:"migration_id, type:varchar"` + BuildType string `pg:"build_type, type:varchar"` + NoChangelog bool `pg:"no_changelog, type:bool"` +} + +type MigratedVersionResultEntity struct { + tableName struct{} `pg:"migrated_version, alias:migrated_version"` + + MigratedVersionEntity + PreviousVersion string `pg:"previous_version, type:varchar"` + PreviousVersionPackageId string `pg:"previous_version_package_id, type:varchar"` +} + +type MigrationChangelogEntity struct { + tableName struct{} `pg:"version_comparison, alias:version_comparison"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + PreviousPackageId string `pg:"previous_package_id, type:varchar"` + PreviousVersion string `pg:"previous_version, type:varchar"` + PreviousRevision int `pg:"previous_revision, type:integer"` +} + +type SchemaMigrationEntity struct { + tableName struct{} `pg:"stored_schema_migration, alias:stored_schema_migration"` + + Num int `pg:"num, pk, type:integer"` + UpHash string `pg:"up_hash, type:varchar"` + SqlUp string `pg:"sql_up, type:varchar"` + DownHash string `pg:"down_hash, type:varchar"` + SqlDown string `pg:"sql_down, type:varchar"` +} + +type MigratedVersionChangesEntity struct { + tableName struct{} `pg:"migrated_version_changes, alias:migrated_version_changes"` + + PackageId string `pg:"package_id, type:varchar"` + Version string `pg:"version, type:varchar"` + Revision int `pg:"revision, type:integer"` + BuildId string `pg:"build_id, type:varchar"` + MigrationId string `pg:"migration_id, type:varchar"` + Changes map[string]interface{} `pg:"changes, type:jsonb"` + UniqueChanges []string `pg:"unique_changes, type:varchar[]"` +} + +type MigratedVersionChangesResultEntity struct { + tableName struct{} `pg:"migrated_version_changes, alias:migrated_version_changes"` + + MigratedVersionChangesEntity 
+ BuildType string `pg:"build_type, type:varchar"` + PreviousVersion string `pg:"previous_version, type:varchar"` + PreviousVersionPackageId string `pg:"previous_version_package_id, type:varchar"` +} + +func MakeSuspiciousBuildView(changedVersion MigratedVersionChangesResultEntity) *view.SuspiciousMigrationBuild { + return &view.SuspiciousMigrationBuild{ + PackageId: changedVersion.PackageId, + Version: changedVersion.Version, + Revision: changedVersion.Revision, + BuildId: changedVersion.BuildId, + Changes: changedVersion.Changes, + BuildType: changedVersion.BuildType, + PreviousVersion: changedVersion.PreviousVersion, + PreviousVersionPackageId: changedVersion.PreviousVersionPackageId, + } +} diff --git a/qubership-apihub-service/migration/repository/MigrationRunRepository.go b/qubership-apihub-service/migration/repository/MigrationRunRepository.go new file mode 100644 index 0000000..6540248 --- /dev/null +++ b/qubership-apihub-service/migration/repository/MigrationRunRepository.go @@ -0,0 +1,72 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + mEntity "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/view" + "github.com/go-pg/pg/v10" +) + +type MigrationRunRepository interface { + GetMigrationRun(migrationId string) (*mEntity.MigrationRunEntity, error) + UpdateMigrationRun(entity *mEntity.MigrationRunEntity) error + GetRunningMigrations() ([]*mEntity.MigrationRunEntity, error) +} + +func NewMigrationRunRepository(cp db.ConnectionProvider) MigrationRunRepository { + return &migrationRunRepositoryImpl{cp: cp} +} + +type migrationRunRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (m migrationRunRepositoryImpl) GetMigrationRun(migrationId string) (*mEntity.MigrationRunEntity, error) { + mRunEnt := new(mEntity.MigrationRunEntity) + err := m.cp.GetConnection().Model(mRunEnt). + Where("id = ?", migrationId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return mRunEnt, nil +} + +func (m migrationRunRepositoryImpl) UpdateMigrationRun(ent *mEntity.MigrationRunEntity) error { + ent.UpdatedAt = time.Now() + _, err := m.cp.GetConnection().Model(ent).Where("id = ?", ent.Id).Update() + return err +} + +func (m migrationRunRepositoryImpl) GetRunningMigrations() ([]*mEntity.MigrationRunEntity, error) { + ents := make([]*mEntity.MigrationRunEntity, 0) + err := m.cp.GetConnection().Model(&ents). + Where("status = ?", view.MigrationStatusRunning). + Where("started_at > ?", time.Now().Add(-7*24*time.Hour)). 
+ Select() + if err != nil { + if err != pg.ErrNoRows { + return nil, err + } + } + return ents, nil +} diff --git a/qubership-apihub-service/migration/service/DBMigrationService.go b/qubership-apihub-service/migration/service/DBMigrationService.go new file mode 100644 index 0000000..8f8a0a7 --- /dev/null +++ b/qubership-apihub-service/migration/service/DBMigrationService.go @@ -0,0 +1,401 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "context" + "fmt" + "os" + "sort" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + mEntity "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/entity" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + mRepository "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/repository" + mView "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/go-pg/pg/v10" + log "github.com/sirupsen/logrus" +) + +type DBMigrationService interface { + Migrate(basePath string) (int, int, bool, error) + SoftMigrateDb(currentVersion int, newVersion int, migrationRequired bool) error + MigrateOperations(migrationId string, req mView.MigrationRequest) error + GetMigrationReport(migrationId string, includeBuildSamples bool) (*mView.MigrationReport, error) + CancelRunningMigrations() error + GetSuspiciousBuilds(migrationId string, changedField string, limit int, page int) ([]mView.SuspiciousMigrationBuild, error) +} + +func NewDBMigrationService(cp db.ConnectionProvider, mRRepo mRepository.MigrationRunRepository, + bCRepo repository.BuildCleanupRepository, transitionRepository repository.TransitionRepository, + systemInfoService service.SystemInfoService, minioStorageService service.MinioStorageService) (DBMigrationService, error) { + service := &dbMigrationServiceImpl{ + cp: cp, + systemInfoService: systemInfoService, + repo: mRRepo, + buildCleanupRepository: bCRepo, + transitionRepository: transitionRepository, + migrationsFolder: systemInfoService.GetBasePath() + "/resources/migrations", + minioStorageService: 
minioStorageService, + } + upMigrations, downMigrations, err := service.getMigrationFilenamesMap() + if err != nil { + return nil, fmt.Errorf("failed to read migration files: %v", err.Error()) + } + service.upMigrations = upMigrations + service.downMigrations = downMigrations + return service, nil +} + +type dbMigrationServiceImpl struct { + cp db.ConnectionProvider + systemInfoService service.SystemInfoService + repo mRepository.MigrationRunRepository + buildCleanupRepository repository.BuildCleanupRepository + transitionRepository repository.TransitionRepository + migrationsFolder string + upMigrations map[int]string + downMigrations map[int]string + minioStorageService service.MinioStorageService +} + +const storedMigrationsTableMigrationVersion = 84 + +func (d *dbMigrationServiceImpl) createSchemaMigrationsTable() error { + _, err := d.cp.GetConnection().Exec(` + create table if not exists schema_migrations + ( + version integer not null, + dirty boolean not null, + PRIMARY KEY(version) + )`) + return err +} + +func (d *dbMigrationServiceImpl) createStoredMigrationsTable() error { + _, err := d.cp.GetConnection().Exec(` + create table if not exists stored_schema_migration + ( + num integer not null, + up_hash varchar not null, + sql_up varchar not null, + down_hash varchar null, + sql_down varchar null, + PRIMARY KEY(num) + )`) + return err +} + +func (d *dbMigrationServiceImpl) Migrate(basePath string) (currentMigrationNum int, newMigrationNum int, migrationRequired bool, err error) { + log.Infof("Schema Migration: start") + + var currentMigrationNumber int + _, err = d.cp.GetConnection().QueryOne(pg.Scan(¤tMigrationNumber), `SELECT version FROM schema_migrations`) + if err != nil { + if strings.Contains(err.Error(), "does not exist") { + err = d.createSchemaMigrationsTable() + if err != nil { + return 0, 0, false, fmt.Errorf("failed to create schema migrations table: %w", err) + } + _, err = d.cp.GetConnection().QueryOne(pg.Scan(¤tMigrationNumber), `SELECT 
version FROM schema_migrations`) + } + if err != pg.ErrNoRows { + return 0, 0, false, err + } + } + if currentMigrationNumber < storedMigrationsTableMigrationVersion { + err = d.createStoredMigrationsTable() + if err != nil { + return 0, 0, false, fmt.Errorf("failed to create stored migrations table: %w", err) + } + } + newMigrationNumber := len(d.upMigrations) + log.Infof("Schema Migration: calculating migrations to execute") + upMigrations, downMigrations, err := d.getRequiredMigrations(currentMigrationNumber, newMigrationNumber) + if err != nil { + return 0, 0, false, fmt.Errorf("failed to calculate required migrations to execute: %w", err) + } + if len(upMigrations)+len(downMigrations) == 0 { + log.Infof("Schema Migration: no migrations required") + return currentMigrationNumber, newMigrationNumber, false, nil + } + + // It's tricky: making the table creation a soft migration was a mistake. + // The constraint added for this table in regular migration #91 would break startup on an empty DB, so make sure the table exists first + err = d.createMigrationTables() + if err != nil { + log.Errorf("Failed to create operations migration table: %s", err) + return currentMigrationNumber, newMigrationNumber, true, nil + } + + err = d.applyRequiredMigrations(upMigrations, downMigrations) + if err != nil { + return 0, 0, false, err + } + log.Infof("Schema Migration: finished successfully") + return currentMigrationNumber, newMigrationNumber, true, nil +} + +func (d *dbMigrationServiceImpl) applyRequiredMigrations(upMigrations []mEntity.SchemaMigrationEntity, downMigrations []mEntity.SchemaMigrationEntity) error { + if len(upMigrations)+len(downMigrations) == 0 { + return nil + } + sort.Slice(upMigrations, func(i, j int) bool { + return upMigrations[i].Num < upMigrations[j].Num + }) + sort.Slice(downMigrations, func(i, j int) bool { + return downMigrations[i].Num > downMigrations[j].Num + }) + var latestMigrationNum int + if len(upMigrations) > 0 { + latestMigrationNum = 
upMigrations[len(upMigrations)-1].Num + } else { + latestMigrationNum = downMigrations[len(downMigrations)-1].Num - 1 + } + log.Infof("Schema migration: start applying %v down and %v up migrations", len(downMigrations), len(upMigrations)) + ctx := context.Background() + err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + for _, downMigration := range downMigrations { + if downMigration.SqlDown != "" { + rs, err := tx.Exec(downMigration.SqlDown) + if err != nil { + return fmt.Errorf("failed to apply stored down migration %v: %w", downMigration.Num, err) + } + log.Infof("successfully applied stored down migration %v: %v rows affected", downMigration.Num, rs.RowsAffected()) + } else { + log.Infof("down migration %v is empty, nothing to apply", downMigration.Num) + } + _, err := tx.Model(&downMigration).WherePK().Delete() + if err != nil { + return fmt.Errorf("failed to remove applied down migration %v from stored_schema_migration: %w", downMigration.Num, err) + } + } + for _, upMigration := range upMigrations { + rs, err := tx.Exec(upMigration.SqlUp) + if err != nil { + return fmt.Errorf("failed to apply local up migration %v: %w", upMigration.Num, err) + } + if upMigration.SqlDown != "" { + _, err = tx.Exec(`SAVEPOINT up_migration`) + if err != nil { + return fmt.Errorf("failed to validate local down migration %v: failed to create transaction savepoint: %w", upMigration.Num, err) + } + _, err = tx.Exec(upMigration.SqlDown) + if err != nil { + return fmt.Errorf("failed to execute local down migration %v: %w", upMigration.Num, err) + } + _, err = tx.Exec(`ROLLBACK TO SAVEPOINT up_migration`) + if err != nil { + return fmt.Errorf("failed to validate local down migration %v: failed to rollback to transaction savepoint: %w", upMigration.Num, err) + } + _, err = tx.Exec(`RELEASE SAVEPOINT up_migration`) + if err != nil { + return fmt.Errorf("failed to validate local down migration %v: failed to release transaction savepoint: %w", upMigration.Num, 
err) + } + } + _, err = tx.Model(&upMigration).Insert() + if err != nil { + return fmt.Errorf("failed to store local up migration %v: %w", upMigration.Num, err) + } + log.Infof("successfully applied local up migration %v: %v rows affected", upMigration.Num, rs.RowsAffected()) + } + migrationEntity := entity.MigrationEntity{ + Version: latestMigrationNum, + Dirty: false, + } + _, err := tx.Model(&entity.MigrationEntity{}). + Where("version is not null"). + Delete() + if err != nil { + return fmt.Errorf("failed to update schema_migrations table with latest migration version %v", latestMigrationNum) + } + _, err = tx.Model(&migrationEntity). + Insert() + if err != nil { + return fmt.Errorf("failed to update schema_migrations table with latest migration version %v", latestMigrationNum) + } + return nil + }) + if err != nil { + return err + } + return nil +} + +func (d *dbMigrationServiceImpl) getRequiredMigrations(currentMigrationNumber int, newMigrationNumber int) ([]mEntity.SchemaMigrationEntity, []mEntity.SchemaMigrationEntity, error) { + requiredUpMigrations := make([]mEntity.SchemaMigrationEntity, 0) + requiredDownMigrations := make([]mEntity.SchemaMigrationEntity, 0) + var latestStoredMigration mEntity.SchemaMigrationEntity + err := d.cp.GetConnection().Model(&latestStoredMigration).Order("num desc").Limit(1).Select() + if err != nil { + if err != pg.ErrNoRows { + return nil, nil, err + } + } + if newMigrationNumber == 0 && currentMigrationNumber == 0 { + return requiredUpMigrations, requiredDownMigrations, nil + } + + if latestStoredMigration.Num == 0 { + log.Infof("Schema Migration: there are no stored migrations, trying to store already applied migrations") + if newMigrationNumber < currentMigrationNumber { + return nil, nil, fmt.Errorf("total number of 'up' migrations (%v) is lower than currently applied version from schema_migrations (%v). 
Please execute required down migrations and retry", newMigrationNumber, currentMigrationNumber) + } + err = d.storeAlreadyAppliedSchemaMigrations(currentMigrationNumber) + if err != nil { + return nil, nil, fmt.Errorf("failed to store already applied schema migrations: %w", err) + } + if currentMigrationNumber == newMigrationNumber { + return requiredUpMigrations, requiredDownMigrations, nil + } + for i := currentMigrationNumber + 1; i <= newMigrationNumber; i++ { + migrationEnt, err := d.makeLocalMigrationEntity(i) + if err != nil { + return nil, nil, err + } + requiredUpMigrations = append(requiredUpMigrations, *migrationEnt) + } + return requiredUpMigrations, requiredDownMigrations, nil + } + + i := currentMigrationNumber + j := newMigrationNumber + for i > 0 && j > 0 { + if i > j { + //applied migration missing + storedMigration, err := d.getSchemaMigrationEntity(i) + if err != nil { + return nil, nil, fmt.Errorf("failed to read stored migration %v: %w", i, err) + } + if storedMigration == nil { + return nil, nil, fmt.Errorf("stored migration %v not found", i) + } + requiredDownMigrations = append(requiredDownMigrations, *storedMigration) + i-- + continue + } + localMigration, err := d.makeLocalMigrationEntity(j) + if err != nil { + return nil, nil, fmt.Errorf("failed to read local migration %v: %w", j, err) + } + if j > i { + //new migration + requiredUpMigrations = append(requiredUpMigrations, *localMigration) + j-- + continue + } + //same migration number (i==j) + storedMigration, err := d.getSchemaMigrationEntity(i) + if err != nil { + return nil, nil, fmt.Errorf("failed to read stored migration %v: %w", i, err) + } + if storedMigration == nil { + return nil, nil, fmt.Errorf("stored migration %v not found", i) + } + if localMigration.UpHash == storedMigration.UpHash { + break + } + //same migration number but different content + requiredUpMigrations = append(requiredUpMigrations, *localMigration) + requiredDownMigrations = append(requiredDownMigrations, 
*storedMigration) + i-- + j-- + } + return requiredUpMigrations, requiredDownMigrations, nil +} + +func (d *dbMigrationServiceImpl) storeAlreadyAppliedSchemaMigrations(currentMigrationNumber int) error { + if currentMigrationNumber == 0 { + return nil + } + schemaMigrationEntities := make([]*mEntity.SchemaMigrationEntity, 0) + for i := 1; i <= currentMigrationNumber; i++ { + migrationEnt, err := d.makeLocalMigrationEntity(i) + if err != nil { + return err + } + schemaMigrationEntities = append(schemaMigrationEntities, migrationEnt) + } + + ctx := context.Background() + return d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(&schemaMigrationEntities).OnConflict("(num) DO NOTHING").Insert() + if err != nil { + return err + } + return nil + }) +} + +func (d *dbMigrationServiceImpl) makeLocalMigrationEntity(migrationNumber int) (*mEntity.SchemaMigrationEntity, error) { + upMigrationFile, exists := d.upMigrations[migrationNumber] + if !exists { + return nil, fmt.Errorf("failed to read up migration file %v", migrationNumber) + } + upMigrationFileData, err := os.ReadFile(upMigrationFile) + if err != nil { + return nil, fmt.Errorf("failed to read up migration file %v: %w", upMigrationFile, err) + } + var downMigrationFileData []byte + downMigrationFile, exists := d.downMigrations[migrationNumber] + if exists { + downMigrationFileData, err = os.ReadFile(downMigrationFile) + if err != nil { + return nil, fmt.Errorf("failed to read down migration file %v: %w", downMigrationFile, err) + } + } else { + downMigrationFileData = []byte{} + } + upMigrationHash := calculateMigrationHash(migrationNumber, upMigrationFileData) + downMigrationHash := calculateMigrationHash(migrationNumber, downMigrationFileData) + + return &mEntity.SchemaMigrationEntity{ + UpHash: upMigrationHash, + DownHash: downMigrationHash, + Num: migrationNumber, + SqlUp: string(upMigrationFileData), + SqlDown: string(downMigrationFileData), + }, nil +} + +func (d 
*dbMigrationServiceImpl) getSchemaMigrationEntity(migrationNumber int) (*mEntity.SchemaMigrationEntity, error) { + var storedMigration mEntity.SchemaMigrationEntity + err := d.cp.GetConnection().Model(&storedMigration).Where("num = ?", migrationNumber).Limit(1).Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return &storedMigration, nil +} + +func (d *dbMigrationServiceImpl) CancelRunningMigrations() error { + _, err := d.cp.GetConnection().Exec(` + update build set status = ?, details = ? + where status in (?) and created_by = 'db migration'`, + view.StatusError, CancelledMigrationError, + pg.In([]view.BuildStatusEnum{view.StatusNotStarted, view.StatusRunning})) + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/migration/service/OperationsMigration.go b/qubership-apihub-service/migration/service/OperationsMigration.go new file mode 100644 index 0000000..644aa93 --- /dev/null +++ b/qubership-apihub-service/migration/service/OperationsMigration.go @@ -0,0 +1,890 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "context" + "fmt" + "strconv" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + mEntity "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/entity" + mView "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-pg/pg/v10" + log "github.com/sirupsen/logrus" +) + +func (d dbMigrationServiceImpl) MigrateOperations(migrationId string, req mView.MigrationRequest) error { + log.Infof("Migration started with request: %+v", req) + + err := d.validateMinRequiredVersion(filesOperationsMigrationVersion) + if err != nil { + return err + } + + mrEnt := mEntity.MigrationRunEntity{ + Id: migrationId, + StartedAt: time.Now(), + Status: mView.MigrationStatusRunning, + Stage: "starting", + PackageIds: req.PackageIds, + Versions: req.Versions, + IsRebuild: req.Rebuild, + CurrentBuilderVersion: req.CurrentBuilderVersion, + IsRebuildChangelogOnly: req.RebuildChangelogOnly, + SkipValidation: req.SkipValidation, + } + + _, err = d.cp.GetConnection().Model(&mrEnt).Insert() + if err != nil { + return fmt.Errorf("failed to insert MigrationRunEntity: %w", err) + } + + _, err = d.cp.GetConnection().Exec(`create schema if not exists migration;`) + if err != nil { + return err + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`create table migration."version_comparison_%s" as select * from version_comparison;`, migrationId)) + if err != nil { + return err + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`create table migration."operation_comparison_%s" as select * from operation_comparison;`, migrationId)) + if err != nil { + return err + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`create index "operation_comparison_%s_comparison_id_index" on 
migration."operation_comparison_%s" (comparison_id);`, migrationId, migrationId)) + if err != nil { + return err + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`create table migration."expired_ts_operation_data_%s" (package_id varchar, version varchar, revision integer);`, migrationId)) + if err != nil { + return err + } + defer utils.SafeAsync(func() { + _, err := d.cp.GetConnection().Exec(fmt.Sprintf(`drop table migration."version_comparison_%s";`, migrationId)) + if err != nil { + log.Errorf("failed to cleanup migration tables: %v", err.Error()) + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`drop table migration."operation_comparison_%s";`, migrationId)) + if err != nil { + log.Errorf("failed to cleanup migration tables: %v", err.Error()) + } + _, err = d.cp.GetConnection().Exec(fmt.Sprintf(`drop table migration."expired_ts_operation_data_%s";`, migrationId)) + if err != nil { + log.Errorf("failed to cleanup migration tables: %v", err.Error()) + } + }) + // Need to fill empty created_by column for existing old versions + fillCreatedBy := `update published_version set created_by = 'unknown' where created_by is null;` + _, err = d.cp.GetConnection().Exec(fillCreatedBy) + if err != nil { + return err + } + + // Need to cleanup broken versions without content + err = d.cleanupEmptyVersions() + if err != nil { + return err + } + + if req.Rebuild { + err = d.cleanForRebuild(req.PackageIds, req.Versions, "") + if err != nil { + return err + } + + if len(req.PackageIds) == 0 && len(req.Versions) == 0 { + // it means that we're going to rebuild all versions + // this action will generate a lot of data and may cause DB disk overflow + // Try to avoid too much space usage by cleaning up all old migration build data + log.Infof("Starting cleanup before full migration") + if d.systemInfoService.IsMinioStorageActive() { + ctx := context.Background() + ids, err := d.buildCleanupRepository.GetRemoveMigrationBuildIds() + if err != nil { + return err + } + err = 
d.minioStorageService.RemoveFiles(ctx, view.BUILD_RESULT_TABLE, ids) + if err != nil { + return err + } + deleted, err := d.buildCleanupRepository.RemoveMigrationBuildSourceData(ids) + if err != nil { + return err + } + log.Infof("Cleanup before full migration cleaned up %d entries", deleted) + } else { + deleted, err := d.buildCleanupRepository.RemoveMigrationBuildData() + if err != nil { + return err + } + log.Infof("Cleanup before full migration cleaned up %d entries", deleted) + } + } + } + + // TODO: restart migration by id? stop migration by id? + // TODO: allow only one migration? + + if req.RebuildChangelogOnly { + err = d.cleanForRebuild(req.PackageIds, req.Versions, view.ChangelogType) + if err != nil { + return err + } + err := d.rebuildAllChangelogs(req.PackageIds, req.Versions, migrationId) + if err != nil { + migrationStatus := mView.MigrationStatusFailed + if err.Error() == CancelledMigrationError { + migrationStatus = mView.MigrationStatusCancelled + } + errUpdateMigrationStatus := d.updateMigrationStatus(migrationId, migrationStatus, "") + if errUpdateMigrationStatus != nil { + return errUpdateMigrationStatus + } + return err + } + } else { + err = d.rebuildAllVersions(req.PackageIds, req.Versions, migrationId) + if err != nil { + migrationStatus := mView.MigrationStatusFailed + if err.Error() == CancelledMigrationError { + migrationStatus = mView.MigrationStatusCancelled + } + errUpdateMigrationStatus := d.updateMigrationStatus(migrationId, migrationStatus, "") + if errUpdateMigrationStatus != nil { + return errUpdateMigrationStatus + } + return err + } + } + + err = d.updateMigrationStatus(migrationId, mView.MigrationStatusComplete, "") + if err != nil { + return err + } + return nil +} + +func (d dbMigrationServiceImpl) GetMigrationReport(migrationId string, includeBuildSamples bool) (*mView.MigrationReport, error) { + mRunEnt, err := d.repo.GetMigrationRun(migrationId) + if mRunEnt == nil { + return nil, fmt.Errorf("migration with id=%s not 
found", migrationId)
	}
	if err != nil {
		// Previously a DB error here was silently discarded, masking it
		// behind the "not found" case above.
		return nil, err
	}

	result := mView.MigrationReport{
		Status:             mRunEnt.Status,
		StartedAt:          mRunEnt.StartedAt,
		ElapsedTime:        time.Since(mRunEnt.StartedAt).String(),
		SuccessBuildsCount: 0,
		ErrorBuildsCount:   0,
		ErrorBuilds:        nil,
	}
	if !mRunEnt.FinishedAt.IsZero() {
		// A finished migration reports its exact duration instead of time-to-now.
		result.ElapsedTime = mRunEnt.FinishedAt.Sub(mRunEnt.StartedAt).String()
		result.FinishedAt = &mRunEnt.FinishedAt
	}

	// Per-version build results; joined with 'build' to recover the
	// previous-version metadata the builds were created with.
	var migratedVersions []mEntity.MigratedVersionResultEntity
	err = d.cp.GetConnection().Model(&migratedVersions).
		ColumnExpr(`migrated_version.*,
		b.metadata->>'previous_version' previous_version,
		b.metadata->>'previous_version_package_id' previous_version_package_id`).
		Join("inner join build b").
		JoinOn("migrated_version.build_id = b.build_id").
		Where("migrated_version.migration_id = ?", migrationId).
		Select()
	if err != nil {
		// Was previously unchecked: a failed query silently produced an empty report.
		return nil, err
	}

	for _, mv := range migratedVersions {
		if mv.Error != "" {
			result.ErrorBuilds = append(result.ErrorBuilds, mView.MigrationError{
				PackageId:                mv.PackageId,
				Version:                  mv.Version,
				Revision:                 mv.Revision,
				Error:                    mv.Error,
				BuildId:                  mv.BuildId,
				BuildType:                mv.BuildType,
				PreviousVersion:          mv.PreviousVersion,
				PreviousVersionPackageId: mv.PreviousVersionPackageId,
			})

			result.ErrorBuildsCount += 1
		} else {
			result.SuccessBuildsCount += 1
		}
	}

	// Aggregated per-field change counters recorded during the migration.
	migrationChanges := make(map[string]int)
	_, err = d.cp.GetConnection().Query(pg.Scan(&migrationChanges), `select changes from migration_changes where migration_id = ?`, migrationId)
	if err != nil {
		// Was previously unchecked.
		return nil, err
	}

	for change, count := range migrationChanges {
		migrationChange := mView.MigrationChange{
			ChangedField:        change,
			AffectedBuildsCount: count,
		}
		if includeBuildSamples {
			// Attach one sample build (lowest build_id) affected by this change.
			changedVersion := new(mEntity.MigratedVersionChangesResultEntity)
			err = d.cp.GetConnection().Model(changedVersion).
				ColumnExpr(`migrated_version_changes.*,
				b.metadata->>'build_type' build_type,
				b.metadata->>'previous_version' previous_version,
				b.metadata->>'previous_version_package_id' previous_version_package_id`).
				Join("inner join build b").
				JoinOn("migrated_version_changes.build_id = b.build_id").
				Where("migrated_version_changes.migration_id = ?", migrationId).
				Where("? = any(unique_changes)", change).
				Order("build_id").
				Limit(1).
				Select()
			if err != nil {
				// Was previously unchecked: a zero-valued sample would have been attached.
				return nil, err
			}
			migrationChange.AffectedBuildSample = mEntity.MakeSuspiciousBuildView(*changedVersion)
		}
		result.MigrationChanges = append(result.MigrationChanges, migrationChange)
	}
	_, err = d.cp.GetConnection().Query(pg.Scan(&result.SuspiciousBuildsCount),
		`select count(*) from migrated_version_changes where migration_id = ?`, migrationId)

	return &result, err
}

// GetSuspiciousBuilds lists builds whose migration produced unexpected data
// changes, optionally filtered by a single changed field (empty string means
// "all fields"); results are paginated via limit/page.
func (d dbMigrationServiceImpl) GetSuspiciousBuilds(migrationId string, changedField string, limit int, page int) ([]mView.SuspiciousMigrationBuild, error) {
	changedVersions := make([]mEntity.MigratedVersionChangesResultEntity, 0)
	err := d.cp.GetConnection().Model(&changedVersions).
		ColumnExpr(`migrated_version_changes.*,
		b.metadata->>'build_type' build_type,
		b.metadata->>'previous_version' previous_version,
		b.metadata->>'previous_version_package_id' previous_version_package_id`).
		Join("inner join build b").
		JoinOn("migrated_version_changes.build_id = b.build_id").
		Where("migrated_version_changes.migration_id = ?", migrationId).
		Where("(? = '') or (? = any(unique_changes))", changedField, changedField).
		Order("build_id").
		Limit(limit).
		Offset(limit * page).
+ Select() + if err != nil { + return nil, err + } + suspiciousBuilds := make([]mView.SuspiciousMigrationBuild, 0) + for _, changedVersion := range changedVersions { + suspiciousBuilds = append(suspiciousBuilds, *mEntity.MakeSuspiciousBuildView(changedVersion)) + } + return suspiciousBuilds, nil +} + +func (d dbMigrationServiceImpl) rebuildAllVersions(packageIds []string, versionsIn []string, migrationId string) error { + err := d.updateMigrationStatus(migrationId, "", "rebuildAllRevisions_start") + if err != nil { + return err + } + + getLatestIndependentVersionsQuery := makeLatestIndependentVersionsQuery(packageIds, versionsIn) + getNotLatestVersionsQuery := makeNotLatestVersionsQuery(packageIds, versionsIn) + + var independentVersions []entity.PublishedVersionEntity + var dependentVersions []entity.PublishedVersionEntity + + _, err = queryWithRetry(d.cp.GetConnection(), &independentVersions, getLatestIndependentVersionsQuery) + if err != nil { + log.Errorf("Failed to read latest versions: %v", err.Error()) + return err + } + if len(independentVersions) <= 0 { + _, err = queryWithRetry(d.cp.GetConnection(), &dependentVersions, getNotLatestVersionsQuery) + if err != nil { + log.Errorf("Failed to read non-latest versions: %v", err.Error()) + return err + } + } + + iteration := 0 + migrationCancelled := false +MigrationProcess: + for len(independentVersions) > 0 || len(dependentVersions) > 0 { + iteration += 1 + + // TODO: add better logging with iteration number, etc + + round := 0 + var versionsThisRound int + + for len(independentVersions) > 0 { + versionsThisRound = len(independentVersions) + round = round + 1 + buildsMap := make(map[string]entity.PublishedVersionEntity, 0) + log.Debugf("Start adding tasks to rebuild %v versions. 
Round: %v", versionsThisRound, round) + err := d.updateMigrationStatus(migrationId, "", "rebuildIndependentVersions_adding_tasks_round_"+strconv.Itoa(round)) + if err != nil { + return err + } + noChangelog := false + for i, versionEnt := range independentVersions { + log.Infof("[%v / %v] addTaskToRebuild start. Version: %v@%v@%v", i+1, versionsThisRound, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + buildId, err := d.addTaskToRebuild(migrationId, versionEnt, noChangelog) + if err != nil { + log.Errorf("[%v / %v] Failed to add task to rebuild version: %v", i+1, versionsThisRound, err.Error()) + + mvEnt := mEntity.MigratedVersionEntity{ + PackageId: versionEnt.PackageId, + Version: versionEnt.Version, + Revision: versionEnt.Revision, + Error: fmt.Sprintf("addTaskToRebuild failed: %v", err.Error()), + BuildId: buildId, + MigrationId: migrationId, + BuildType: view.BuildType, + NoChangelog: noChangelog, + } + _, err := d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("[%v / %v] Failed to store error for %v@%v@%v : %v", i+1, versionsThisRound, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, err.Error()) + continue + } + } else { + buildsMap[buildId] = versionEnt + log.Infof("[%v / %v] addTaskToRebuild complete. BuildId: %v. Version %v@%v@%v NoChangelog: %v", i+1, versionsThisRound, buildId, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, noChangelog) + } + } + err = d.updateMigrationStatus(migrationId, "", "rebuildIndependentVersions_waiting_builds_round_"+strconv.Itoa(round)) + if err != nil { + return err + } + log.Infof("Waiting for all builds to finish. Round: %v", round) + buildsThisRound := len(buildsMap) + finishedBuilds := 0 + for len(buildsMap) > 0 { + log.Infof("Finished builds: %v / %v. 
Round: %v", finishedBuilds, buildsThisRound, round) + time.Sleep(15 * time.Second) + buildIdsList := getMapKeys(buildsMap) + buildEnts, err := d.getBuilds(buildIdsList) + if err != nil { + log.Errorf("Failed to get builds statuses: %v", err.Error()) + return err + } + for _, buildEnt := range buildEnts { + buildVersion := strings.Split(buildEnt.Version, "@")[0] + buildRevision := strings.Split(buildEnt.Version, "@")[1] + buildPackageId := buildEnt.PackageId + + buildRevisionInt := 1 + + mvEnt := mEntity.MigratedVersionEntity{ + PackageId: buildPackageId, + Version: buildVersion, + Revision: buildRevisionInt, + Error: "", + BuildId: buildEnt.BuildId, + MigrationId: migrationId, + BuildType: view.BuildType, + NoChangelog: noChangelog, + } + + if buildRevision != "" { + buildRevisionInt, err = strconv.Atoi(buildRevision) + if err != nil { + mvEnt.Error = fmt.Sprintf("Unable to convert revision value '%s' to int", buildRevision) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + mvEnt.Revision = buildRevisionInt + } + + if buildEnt.Status == string(view.StatusComplete) { + finishedBuilds = finishedBuilds + 1 + delete(buildsMap, buildEnt.BuildId) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + if buildEnt.Status == string(view.StatusError) { + if buildEnt.Details == CancelledMigrationError { + migrationCancelled = true + break MigrationProcess + } + + finishedBuilds = finishedBuilds + 1 + + errorDetails := buildEnt.Details + if errorDetails == "" { + errorDetails = "No error details.." + } + + delete(buildsMap, buildEnt.BuildId) + + log.Errorf("Builder failed to build %v. 
Details: %v", buildEnt.BuildId, errorDetails) + + mvEnt.Error = errorDetails + + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + } + } + _, err = queryWithRetry(d.cp.GetConnection(), &independentVersions, getLatestIndependentVersionsQuery) + if err != nil { + log.Errorf("Failed to read latest versions: %v", err.Error()) + return err + } + } + + ////////////////////////// + + _, err = queryWithRetry(d.cp.GetConnection(), &dependentVersions, getNotLatestVersionsQuery) + if err != nil { + log.Errorf("Failed to read non-latest versions: %v", err.Error()) + return err + } + + totalNumberOfVersions := len(dependentVersions) + buildsMap := make(map[string]entity.PublishedVersionEntity, 0) + noChangelog := true + err = d.updateMigrationStatus(migrationId, "", "rebuildNotLatestRevisions_adding_builds") + if err != nil { + return err + } + + log.Infof("Start adding tasks to rebuild %v versions.", totalNumberOfVersions) + + for i, versionEnt := range dependentVersions { + log.Debugf("[%v / %v] addTaskToRebuild start. 
Version: %v@%v@%v", i+1, totalNumberOfVersions, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + + buildId, err := d.addTaskToRebuild(migrationId, versionEnt, noChangelog) + if err != nil { + log.Errorf("[%v / %v] Failed to add task to rebuild version: %v", i+1, totalNumberOfVersions, err.Error()) + + mvEnt := mEntity.MigratedVersionEntity{ + PackageId: versionEnt.PackageId, + Version: versionEnt.Version, + Revision: versionEnt.Revision, + Error: fmt.Sprintf("addTaskToRebuild failed: %v", err.Error()), + BuildId: buildId, + MigrationId: migrationId, + BuildType: view.BuildType, + NoChangelog: noChangelog, + } + _, err := d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("[%v / %v] Failed to store error for %v@%v@%v : %v", i+1, totalNumberOfVersions, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, err.Error()) + continue + } + } else { + log.Infof("[%v / %v] addTaskToRebuild complete. BuildId: %v. Version %v@%v@%v NoChangelog: %v", i+1, totalNumberOfVersions, buildId, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, noChangelog) + buildsMap[buildId] = versionEnt + } + } + err = d.updateMigrationStatus(migrationId, "", "rebuildNotLatestRevisions_waiting_builds") + if err != nil { + return err + } + + log.Info("Waiting for all builds to finish.") + buildsThisRound := len(buildsMap) + finishedBuilds := 0 + getBuildsFails := 0 + for len(buildsMap) > 0 { + log.Infof("Finished builds: %v / %v.", finishedBuilds, buildsThisRound) + time.Sleep(15 * time.Second) + buildIdsList := getMapKeys(buildsMap) + buildEnts, err := d.getBuilds(buildIdsList) + if err != nil { + log.Errorf("Failed to get builds statuses: %v", err.Error()) + // Try to wait in case of *temporary* DB outage for ~5 min + getBuildsFails += 1 + if getBuildsFails > 20 { + return err + } + } + getBuildsFails = 0 + for _, buildEnt := range buildEnts { + buildVersion := strings.Split(buildEnt.Version, "@")[0] + buildRevision := 
strings.Split(buildEnt.Version, "@")[1] + buildPackageId := buildEnt.PackageId + + buildRevisionInt := 1 + mvEnt := mEntity.MigratedVersionEntity{ + PackageId: buildPackageId, + Version: buildVersion, + Revision: buildRevisionInt, + Error: "", + BuildId: buildEnt.BuildId, + MigrationId: migrationId, + BuildType: view.BuildType, + NoChangelog: noChangelog, + } + + if buildRevision != "" { + buildRevisionInt, err = strconv.Atoi(buildRevision) + if err != nil { + mvEnt.Error = fmt.Sprintf("Unable to convert revision value '%s' to int", buildRevision) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + mvEnt.Revision = buildRevisionInt + } + + if buildEnt.Status == string(view.StatusComplete) { + finishedBuilds = finishedBuilds + 1 + + delete(buildsMap, buildEnt.BuildId) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + if buildEnt.Status == string(view.StatusError) { + if buildEnt.Details == CancelledMigrationError { + migrationCancelled = true + break MigrationProcess + } + + finishedBuilds = finishedBuilds + 1 + + errorDetails := buildEnt.Details + if errorDetails == "" { + errorDetails = "No error details.." + } + + delete(buildsMap, buildEnt.BuildId) + + log.Errorf("Builder failed to build %v. 
Details: %v", buildEnt.BuildId, errorDetails) + + mvEnt.Error = errorDetails + + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + } + } + + ///////////////////// + + _, err = queryWithRetry(d.cp.GetConnection(), &independentVersions, getLatestIndependentVersionsQuery) + if err != nil { + log.Errorf("Failed to read latest versions: %v", err.Error()) + return err + } + + //////////////////// + + } + log.Info("Finished rebuilding all versions") + + if migrationCancelled { + return fmt.Errorf(CancelledMigrationError) + } + err = d.rebuildChangelogsAfterVersionsMigrations(migrationId) + if err != nil { + log.Errorf("Failed to rebuildChangelogsAfterVersionsMigrations: %v", err.Error()) + return err + } + + err = d.rebuildTextSearchTables(migrationId) + if err != nil { + log.Errorf("Failed to rebuildTextSearchTables: %v", err.Error()) + return err + } + return nil +} + +func (d dbMigrationServiceImpl) updateMigrationStatus(migrationId string, status string, stage string) error { + mEnt, err := d.repo.GetMigrationRun(migrationId) + if err != nil { + return err + } + if status != "" { + if status == mView.MigrationStatusComplete || status == mView.MigrationStatusFailed { + mEnt.FinishedAt = time.Now() + } + mEnt.Status = status + } + if stage != "" { + mEnt.Stage = stage + } + return d.repo.UpdateMigrationRun(mEnt) +} + +func (d dbMigrationServiceImpl) rebuildAllChangelogs(packageIds []string, versionsIn []string, migrationId string) error { + changelogQuery := makeAllChangelogForMigrationQuery(packageIds, versionsIn) + var migrationChangelogEntities []mEntity.MigrationChangelogEntity + + _, err := queryWithRetry(d.cp.GetConnection(), &migrationChangelogEntities, changelogQuery) + if err != nil { + log.Errorf("Failed to get migrationChangelogEntities: %v", err.Error()) + return err + } + err = d.rebuildChangelog(migrationChangelogEntities, migrationId) + if 
err != nil {
		log.Errorf("Failed to rebuildChangelog: %v", err.Error())
		return err
	}
	return nil
}

// rebuildChangelogsAfterVersionsMigrations rebuilds changelogs for versions
// that were migrated in the given migration run.
func (d dbMigrationServiceImpl) rebuildChangelogsAfterVersionsMigrations(migrationId string) error {
	changelogQuery := makeChangelogByMigratedVersionQuery(migrationId)
	var migrationChangelogEntities []mEntity.MigrationChangelogEntity
	_, err := queryWithRetry(d.cp.GetConnection(), &migrationChangelogEntities, changelogQuery)
	if err != nil {
		log.Errorf("Failed to get migrationChangelogEntities: %v", err.Error())
		return err
	}
	err = d.rebuildChangelog(migrationChangelogEntities, migrationId)
	if err != nil {
		log.Errorf("Failed to rebuildChangelog: %v", err.Error())
		return err
	}
	return nil
}

// rebuildChangelog schedules changelog rebuild tasks for the given entries and
// polls the build queue until every scheduled build reaches a terminal state.
// Returns an error with text CancelledMigrationError if the migration was cancelled.
func (d dbMigrationServiceImpl) rebuildChangelog(migrationChangelogs []mEntity.MigrationChangelogEntity, migrationId string) error {
	err := d.updateMigrationStatus(migrationId, "", "rebuildChangelogs_start")
	if err != nil {
		return err
	}

	buildsMap := make(map[string]interface{}, 0)
	err = d.updateMigrationStatus(migrationId, "", "rebuildChangelogs_adding_builds")
	if err != nil {
		return err
	}
	for _, changelogEntity := range migrationChangelogs {
		buildId, err := d.addChangelogTaskToRebuild(migrationId, changelogEntity)
		if err != nil {
			log.Errorf("Failed to add task to rebuild changelog. Package - %s. Version - %s. Revision - %d.Error - %v", changelogEntity.PackageId, changelogEntity.Version, changelogEntity.Revision, err.Error())
			mvEnt := mEntity.MigratedVersionEntity{
				PackageId:   changelogEntity.PackageId,
				Version:     changelogEntity.Version,
				Revision:    changelogEntity.Revision,
				Error:       fmt.Sprintf("addChangelogTaskToRebuild failed: %v", err.Error()),
				BuildId:     buildId,
				MigrationId: migrationId,
				BuildType:   view.ChangelogType,
			}
			_, err = d.cp.GetConnection().Model(&mvEnt).Insert()
			if err != nil {
				log.Errorf("Failed to store error for %v@%v@%v : %s", changelogEntity.PackageId, changelogEntity.Version, changelogEntity.Revision, err.Error())
			}
			// BUG FIX: previously, when the task creation failed but the error
			// record insert succeeded, the (empty) buildId was still added to
			// buildsMap, making the polling loop below wait forever for a build
			// that was never created. Always skip failed tasks.
			continue
		}
		buildsMap[buildId] = changelogEntity
		log.Infof("addChangelogTaskToRebuild end. BuildId: %s", buildId)
	}
	err = d.updateMigrationStatus(migrationId, "", "rebuildChangelogs_waiting_builds")
	if err != nil {
		return err
	}
	log.Info("Waiting for all builds to finish.")
	buildsThisRound := len(buildsMap)
	finishedBuilds := 0
	migrationCancelled := false
MigrationProcess:
	for len(buildsMap) > 0 {
		log.Infof("Finished builds: %v / %v.", finishedBuilds, buildsThisRound)
		time.Sleep(15 * time.Second)
		buildIdsList := getMapKeysGeneric(buildsMap)
		buildEnts, err := d.getBuilds(buildIdsList)
		if err != nil {
			log.Errorf("Failed to get builds statuses: %v", err.Error())
			return err
		}
		for _, buildEnt := range buildEnts {
			// Build version is stored as "<version>@<revision>"; strings.Cut
			// never panics, unlike indexing Split results on a value without "@".
			buildVersion, buildRevision, _ := strings.Cut(buildEnt.Version, "@")
			buildPackageId := buildEnt.PackageId

			buildRevisionInt := 1

			mvEnt := mEntity.MigratedVersionEntity{
				PackageId:   buildPackageId,
				Version:     buildVersion,
				Revision:    buildRevisionInt,
				Error:       "",
				BuildId:     buildEnt.BuildId,
				MigrationId: migrationId,
				BuildType:   view.ChangelogType,
			}

			if buildRevision != "" {
				buildRevisionInt, err = strconv.Atoi(buildRevision)
				if err != nil {
					mvEnt.Error = fmt.Sprintf("Unable to convert revision value 
'%s' to int", buildRevision) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + mvEnt.Revision = buildRevisionInt + } + + if buildEnt.Status == string(view.StatusComplete) { + finishedBuilds = finishedBuilds + 1 + delete(buildsMap, buildEnt.BuildId) + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + if buildEnt.Status == string(view.StatusError) { + if buildEnt.Details == CancelledMigrationError { + migrationCancelled = true + break MigrationProcess + } + + finishedBuilds = finishedBuilds + 1 + + errorDetails := buildEnt.Details + if errorDetails == "" { + errorDetails = "No error details.." + } + + delete(buildsMap, buildEnt.BuildId) + + log.Errorf("Builder failed to build %v. Details: %v", buildEnt.BuildId, errorDetails) + + mvEnt.Error = errorDetails + + _, err = d.cp.GetConnection().Model(&mvEnt).Insert() + if err != nil { + log.Errorf("failed to store MigratedVersionEntity %+v: %s", mvEnt, err) + } + continue + } + } + } + log.Info("Finished rebuilding changelogs") + if migrationCancelled { + return fmt.Errorf(CancelledMigrationError) + } + return nil +} + +func (d dbMigrationServiceImpl) rebuildTextSearchTables(migrationId string) error { + err := d.updateMigrationStatus(migrationId, "", "rebuildTextSearchTables_start") + if err != nil { + return err + } + log.Info("Start rebuilding text search tables for changed search scopes") + + log.Info("Calculating ts_rest_operation_data") + calculateRestTextSearchDataQuery := fmt.Sprintf(` + insert into ts_rest_operation_data + select data_hash, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_request, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_response, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_annotation, + 
to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_properties, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_examples + from operation_data + where data_hash in ( + select distinct o.data_hash + from operation o + inner join migration."expired_ts_operation_data_%s" exp + on exp.package_id = o.package_id + and exp.version = o.version + and exp.revision = o.revision + where o.type = ? + ) + order by 1 + for update skip locked + on conflict (data_hash) do update + set scope_request = EXCLUDED.scope_request, + scope_response = EXCLUDED.scope_response, + scope_annotation = EXCLUDED.scope_annotation, + scope_properties = EXCLUDED.scope_properties, + scope_examples = EXCLUDED.scope_examples;`, migrationId) + _, err = d.cp.GetConnection().Exec(calculateRestTextSearchDataQuery, + view.RestScopeRequest, view.RestScopeResponse, view.RestScopeAnnotation, view.RestScopeProperties, view.RestScopeExamples, + view.RestApiType) + if err != nil { + return fmt.Errorf("failed to calculate ts_rest_operation_data: %w", err) + } + + log.Info("Calculating ts_graphql_operation_data") + calculateGraphqlTextSearchDataQuery := fmt.Sprintf(` + insert into ts_graphql_operation_data + select data_hash, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_argument, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_property, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_annotation + from operation_data + where data_hash in ( + select distinct o.data_hash + from operation o + inner join migration."expired_ts_operation_data_%s" exp + on exp.package_id = o.package_id + and exp.version = o.version + and exp.revision = o.revision + where o.type = ? 
+ ) + order by 1 + for update skip locked + on conflict (data_hash) do update + set scope_argument = EXCLUDED.scope_argument, + scope_property = EXCLUDED.scope_property, + scope_annotation = EXCLUDED.scope_annotation;`, migrationId) + _, err = d.cp.GetConnection().Exec(calculateGraphqlTextSearchDataQuery, + view.GraphqlScopeArgument, view.GraphqlScopeProperty, view.GraphqlScopeAnnotation, + view.GraphqlApiType) + if err != nil { + return fmt.Errorf("failed to calculate ts_grahpql_operation_data: %w", err) + } + + log.Info("Calculating ts_operation_data") + calculateAllTextSearchDataQuery := fmt.Sprintf(` + insert into ts_operation_data + select data_hash, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_all + from operation_data + where data_hash in ( + select distinct o.data_hash + from operation o + inner join migration."expired_ts_operation_data_%s" exp + on exp.package_id = o.package_id + and exp.version = o.version + and exp.revision = o.revision + ) + order by 1 + for update skip locked + on conflict (data_hash) do update + set scope_all = EXCLUDED.scope_all`, migrationId) + _, err = d.cp.GetConnection().Exec(calculateAllTextSearchDataQuery, view.ScopeAll) + if err != nil { + return fmt.Errorf("failed to calculate ts_operation_data: %w", err) + } + log.Info("Finished rebuilding text search tables for changed search scopes") + err = d.updateMigrationStatus(migrationId, "", "rebuildTextSearchTables_end") + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/migration/service/OperationsMigrationUtils.go b/qubership-apihub-service/migration/service/OperationsMigrationUtils.go new file mode 100644 index 0000000..5e697b1 --- /dev/null +++ b/qubership-apihub-service/migration/service/OperationsMigrationUtils.go @@ -0,0 +1,654 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "archive/zip" + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "strconv" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + mEntity "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-pg/pg/v10" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" +) + +const MigrationBuildPriority = -100 +const CancelledMigrationError = "cancelled" + +func (d dbMigrationServiceImpl) validateMinRequiredVersion(minRequiredMigrationVersion int) error { + var currentMigration entity.MigrationEntity + err := d.cp.GetConnection().Model(¤tMigration). 
+ First() + + if err != nil { + return err + } + if currentMigration.Dirty { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.MigrationVersionIsDirty, + Message: exception.MigrationVersionIsDirtyMsg, + Params: map[string]interface{}{"currentVersion": currentMigration.Version}, + } + } + if currentMigration.Version < minRequiredMigrationVersion { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.MigrationVersionIsTooLow, + Message: exception.MigrationVersionIsTooLowMsg, + Params: map[string]interface{}{"currentVersion": currentMigration.Version, "requiredVersion": minRequiredMigrationVersion}, + } + } + return nil +} + +func (d dbMigrationServiceImpl) getTypeAndTitleFromPublishedFileData(filename string, checksum string) (string, string) { + fileData := new(entity.PublishedContentDataEntity) + err := d.cp.GetConnection().Model(fileData). + Where("checksum = ?", checksum). + First() + if err != nil { + log.Errorf("failed to get file data by checksum %v", checksum) + } + title := "" + fileType := view.Unknown + if fileData != nil && len(fileData.Data) > 0 { + fileType, title = service.GetContentInfo(filename, &fileData.Data) + } + + if title == "" { + log.Infof("failed to calculate title for %v", checksum) + title = getTitleFromFilename(filename) + } + if fileType == view.Unknown { + log.Infof("file %v has unknown type", filename) + } + return string(fileType), title +} + +func (d dbMigrationServiceImpl) addTaskToRebuild(migrationId string, versionEnt entity.PublishedVersionEntity, noChangelog bool) (string, error) { + buildId := uuid.New().String() + log.Debugf("Start creating task %v to rebuild %v@%v@%v NoChangelog: %v", buildId, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, noChangelog) + + buildEnt := entity.BuildEntity{ + BuildId: buildId, + Status: string(view.StatusNotStarted), + Details: "", + + PackageId: versionEnt.PackageId, + Version: fmt.Sprintf("%s@%v", 
versionEnt.Version, versionEnt.Revision),

		CreatedBy:    "db migration",
		RestartCount: 0,
		Priority:     MigrationBuildPriority,
		Metadata: map[string]interface{}{
			"build_type":                  view.BuildType,
			"previous_version":            versionEnt.PreviousVersion,
			"previous_version_package_id": versionEnt.PreviousVersionPackageId,
		},
	}

	var config, data []byte
	var err error
	if d.systemInfoService.IsMinioStorageActive() && !d.systemInfoService.IsMinioStoreOnlyBuildResult() {
		// Sources archive lives in minio; the DB only stores the config and
		// the archive checksum used as the minio object key.
		savedSourcesQuery := `
	select config, archive_checksum
	from published_sources
	where package_id = ?
	and version = ?
	and revision = ?
	limit 1
	`
		configEntity, err := d.getPublishedSrcDataConfigEntity(savedSourcesQuery, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
		if err != nil {
			return "", err
		}
		if configEntity.ArchiveChecksum != "" {
			file, err := d.minioStorageService.GetFile(context.Background(), view.PUBLISHED_SOURCES_ARCHIVES_TABLE, configEntity.ArchiveChecksum)
			if err != nil {
				return "", err
			}
			config = configEntity.Config
			data = file
		}
	} else {
		// Sources archive and config are both stored in the DB.
		savedSourcesQuery := `
	select psa.checksum as archive_checksum, psa.data, ps.config, ps.package_id
	from published_sources_archives psa, published_sources ps
	where ps.package_id = ?
	and ps.version = ?
	and ps.revision = ?
	and ps.archive_checksum = psa.checksum
	limit 1
	`
		configEntity, err := d.getPublishedSrcDataConfigEntity(savedSourcesQuery, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
		if err != nil {
			return "", err
		}
		data = configEntity.Data
		config = configEntity.Config
	}
	// Prefer original sources when available; otherwise reassemble the build
	// input from the previously published files.
	var buildSourceEnt *entity.BuildSourceEntity
	if len(data) > 0 {
		buildSourceEnt, err = d.makeBuildSourceEntityFromSources(migrationId, buildId, noChangelog, &versionEnt, config, data)
	} else {
		buildSourceEnt, err = d.makeBuildSourceEntityFromPublishedFiles(migrationId, buildId, noChangelog, &versionEnt)
	}
	if err != nil {
		return "", err
	}

	err = d.storeVersionBuildTask(buildEnt, *buildSourceEnt)
	if err != nil {
		return "", err
	}

	log.Debugf("Created task %v to rebuild %v@%v@%v NoChangelog: %v", buildId, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, noChangelog)

	return buildId, nil
}

// getPublishedSrcDataConfigEntity runs the given saved-sources query with
// (packageId, version, revision) bind parameters and scans a single result row.
func (d dbMigrationServiceImpl) getPublishedSrcDataConfigEntity(query, packageId, version string, revision int) (*entity.PublishedSrcDataConfigEntity, error) {
	savedSources := new(entity.PublishedSrcDataConfigEntity)
	if _, err := d.cp.GetConnection().Query(savedSources, query, packageId, version, revision); err != nil {
		return nil, err
	}
	return savedSources, nil
}

// addChangelogTaskToRebuild creates a changelog-type build task comparing the
// given version revision against its previous version. Returns the new build id.
func (d dbMigrationServiceImpl) addChangelogTaskToRebuild(migrationId string, changelogEnt mEntity.MigrationChangelogEntity) (string, error) {
	config := view.BuildConfig{
		PackageId:                changelogEnt.PackageId,
		Version:                  fmt.Sprintf("%s@%d", changelogEnt.Version, changelogEnt.Revision),
		PreviousVersionPackageId: changelogEnt.PreviousPackageId,
		PreviousVersion:          fmt.Sprintf("%s@%d", changelogEnt.PreviousVersion, changelogEnt.PreviousRevision),
		BuildType:                view.ChangelogType,
		CreatedBy:                "db migration",
		PublishedAt:              time.Now(),
		MigrationBuild:           true,
		MigrationId:              migrationId,
	}
	status := view.StatusNotStarted

	buildId := uuid.New().String()

	buildEnt := 
entity.BuildEntity{
		BuildId: buildId,
		Status:  string(status),
		Details: "",

		PackageId: config.PackageId,
		Version:   config.Version,

		CreatedBy:    config.CreatedBy,
		RestartCount: 0,
		Priority:     MigrationBuildPriority,
		Metadata: map[string]interface{}{
			"build_type":                  config.BuildType,
			"previous_version":            config.PreviousVersion,
			"previous_version_package_id": config.PreviousVersionPackageId,
		},
	}

	confAsMap, err := view.BuildConfigToMap(config)
	if err != nil {
		return "", err
	}

	sourceEnt := entity.BuildSourceEntity{
		BuildId: buildEnt.BuildId,
		Config:  *confAsMap,
	}
	if err = d.storeVersionBuildTask(buildEnt, sourceEnt); err != nil {
		return "", err
	}

	return buildId, nil
}

// getVersionConfigReferences loads the published references of a version
// revision and converts them to build-config reference views.
func (d dbMigrationServiceImpl) getVersionConfigReferences(packageId string, version string, revision int) ([]view.BCRef, error) {
	var refEntities []entity.PublishedReferenceEntity
	err := d.cp.GetConnection().Model(&refEntities).
		Where("package_id = ?", packageId).
		Where("version = ?", version).
		Where("revision = ?", revision).
		Select()
	if err != nil {
		return nil, err
	}
	configRefs := make([]view.BCRef, 0)
	for _, refEnt := range refEntities {
		ref := view.BCRef{
			RefId:         refEnt.RefPackageId,
			Version:       view.MakeVersionRefKey(refEnt.RefVersion, refEnt.RefRevision),
			ParentRefId:   refEnt.ParentRefPackageId,
			ParentVersion: view.MakeVersionRefKey(refEnt.ParentRefVersion, refEnt.ParentRefRevision),
			Excluded:      refEnt.Excluded,
		}
		configRefs = append(configRefs, ref)
	}
	return configRefs, nil
}

// makeBuildSourceEntityFromPublishedFiles reassembles a build source entity
// for a version that has no stored sources archive: the previously published
// file contents are zipped together and a build config is reconstructed from
// the version's metadata and references.
func (d dbMigrationServiceImpl) makeBuildSourceEntityFromPublishedFiles(migrationId string, buildId string, noChangelog bool, versionEnt *entity.PublishedVersionEntity) (*entity.BuildSourceEntity, error) {
	configRefs, err := d.getVersionConfigReferences(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
	if err != nil {
		return nil, err
	}
	filesWithDataQuery := `
	select rc.*, pd.data as data
	from published_version_revision_content rc, published_data pd
	where rc.package_id = pd.package_id
	and rc.checksum = pd.checksum
	and rc.package_id = ?
	and rc.version = ?
	and rc.revision = ?
	`
	var fileEntities []mEntity.PublishedContentMigrationEntity
	if _, err = d.cp.GetConnection().Query(&fileEntities, filesWithDataQuery, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision); err != nil {
		return nil, err
	}
	configFiles := make([]view.BCFile, 0)

	// Zip every published file into an in-memory sources archive while
	// collecting the matching build-config file entries.
	sourcesBuff := bytes.Buffer{}
	zw := zip.NewWriter(&sourcesBuff)
	for _, fileEnt := range fileEntities {
		fw, zipErr := zw.Create(fileEnt.FileId)
		if zipErr != nil {
			return nil, zipErr
		}
		if _, zipErr = fw.Write(fileEnt.Data); zipErr != nil {
			return nil, zipErr
		}
		publish := true
		configFiles = append(configFiles, view.BCFile{
			FileId:  fileEnt.FileId,
			Slug:    fileEnt.Slug,
			Index:   fileEnt.Index,
			Labels:  fileEnt.Metadata.GetLabels(),
			Publish: &publish,
			BlobId:  fileEnt.Metadata.GetBlobId(),
		})
	}
	if err = zw.Close(); err != nil {
		return nil, err
	}

	config := view.BuildConfig{
		PackageId:                versionEnt.PackageId,
		Version:                  fmt.Sprintf("%s@%v", versionEnt.Version, versionEnt.Revision),
		BuildType:                view.BuildType,
		PreviousVersion:          versionEnt.PreviousVersion,
		PreviousVersionPackageId: versionEnt.PreviousVersionPackageId,
		Status:                   versionEnt.Status,
		Refs:                     configRefs,
		Files:                    configFiles,
		PublishId:                buildId,
		Metadata: view.BuildConfigMetadata{
			BranchName:    versionEnt.Metadata.GetBranchName(),
			RepositoryUrl: versionEnt.Metadata.GetRepositoryUrl(),
			CloudName:     versionEnt.Metadata.GetCloudName(),
			CloudUrl:      versionEnt.Metadata.GetCloudUrl(),
			Namespace:     versionEnt.Metadata.GetNamespace(),
			VersionLabels: versionEnt.Labels,
		},
		CreatedBy:      versionEnt.CreatedBy,
		NoChangelog:    noChangelog,
		PublishedAt:    versionEnt.PublishedAt,
		MigrationBuild: true,
		MigrationId:    migrationId,
	}
	confAsMap, err := view.BuildConfigToMap(config)
	if err != nil {
		return nil, err
	}

	sourceEnt := entity.BuildSourceEntity{
		BuildId: buildId,
		Source:  sourcesBuff.Bytes(),
		Config:  *confAsMap,
	}

	return &sourceEnt, nil
}

func (d dbMigrationServiceImpl) 
makeBuildSourceEntityFromSources(migrationId string, buildId string, noChangelog bool, versionEnt *entity.PublishedVersionEntity, buildConfigData []byte, sourceData []byte) (*entity.BuildSourceEntity, error) { + var buildConfig view.BuildConfig + err := json.Unmarshal(buildConfigData, &buildConfig) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal build config from sources: %v", err.Error()) + } + if len(buildConfig.Files)+len(buildConfig.Refs) == 0 { + return nil, fmt.Errorf("empty build config") + } + if len(sourceData) <= 0 { + return nil, fmt.Errorf("failed to read sources archive for version: %v", *versionEnt) + } + + publishedFilesQuery := ` + select * + from published_version_revision_content + where package_id = ? + and version = ? + and revision = ? + ` + var fileEntities []entity.PublishedContentEntity + _, err = d.cp.GetConnection().Query(&fileEntities, publishedFilesQuery, versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + publishedFileEntitiesMap := make(map[string]entity.PublishedContentEntity, 0) + for _, fileEnt := range fileEntities { + publishedFileEntitiesMap[fileEnt.FileId] = fileEnt + } + for i, file := range buildConfig.Files { + if file.Publish != nil && *file.Publish == true { + publishedFileEnt, exists := publishedFileEntitiesMap[file.FileId] + if !exists { + return nil, fmt.Errorf("published file %v not found", file.FileId) + } + buildConfig.Files[i].Slug = publishedFileEnt.Slug + buildConfig.Files[i].Index = publishedFileEnt.Index + buildConfig.Files[i].BlobId = publishedFileEnt.Metadata.GetBlobId() + buildConfig.Files[i].Labels = publishedFileEnt.Metadata.GetLabels() + } + } + buildConfig.Refs, err = d.getVersionConfigReferences(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + config := view.BuildConfig{ + PackageId: versionEnt.PackageId, + Version: view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision), + 
BuildType: view.BuildType, + PreviousVersion: versionEnt.PreviousVersion, + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + Status: versionEnt.Status, + Refs: buildConfig.Refs, + Files: buildConfig.Files, + PublishId: buildId, + Metadata: view.BuildConfigMetadata{ + BranchName: versionEnt.Metadata.GetBranchName(), + RepositoryUrl: versionEnt.Metadata.GetRepositoryUrl(), + CloudName: versionEnt.Metadata.GetCloudName(), + CloudUrl: versionEnt.Metadata.GetCloudUrl(), + Namespace: versionEnt.Metadata.GetNamespace(), + VersionLabels: versionEnt.Labels, + }, + CreatedBy: versionEnt.CreatedBy, + NoChangelog: noChangelog, + PublishedAt: versionEnt.PublishedAt, + MigrationBuild: true, + MigrationId: migrationId, + } + + confAsMap, err := view.BuildConfigToMap(config) + if err != nil { + return nil, err + } + + sourceEnt := entity.BuildSourceEntity{ + BuildId: buildId, + Source: sourceData, + Config: *confAsMap, + } + + return &sourceEnt, nil +} +func (d dbMigrationServiceImpl) storeVersionBuildTask(buildEnt entity.BuildEntity, sourceEnt entity.BuildSourceEntity) error { + ctx := context.Background() + return d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(&buildEnt).Insert() + if err != nil { + return err + } + _, err = tx.Model(&sourceEnt).Insert() + if err != nil { + return err + } + + return nil + }) +} + +func (d dbMigrationServiceImpl) getBuilds(buildIds []string) ([]entity.BuildEntity, error) { + var result []entity.BuildEntity + if len(buildIds) == 0 { + return nil, nil + } + err := d.cp.GetConnection().Model(&result). + Where("build_id in (?)", pg.In(buildIds)). 
+ Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func getMapKeys(m map[string]entity.PublishedVersionEntity) []string { + result := make([]string, 0) + for k := range m { + result = append(result, k) + } + return result +} +func getMapKeysGeneric(m map[string]interface{}) []string { + result := make([]string, 0) + for k := range m { + result = append(result, k) + } + return result +} + +func getTitleFromFilename(filename string) string { + name := strings.TrimSuffix(filename, filepath.Ext(filename)) + return strings.Title(strings.ToLower(name)) +} + +func (d dbMigrationServiceImpl) cleanupEmptyVersions() error { + selectEmptyVersionsQuery := ` + with doccount as ( + select package_id, version, revision, count(*) as cnt from published_version_revision_content as content group by package_id, version, revision + ), refcount as ( + select package_id, version, revision, count(*) as cnt from published_version_reference as refs group by package_id, version, revision + ) + select pv.package_id, pv.version, pv.revision from + published_version pv + left join doccount + on pv.package_id = doccount.package_id + and pv.version = doccount.version + and pv.revision = doccount.revision + left join refcount + on pv.package_id = refcount.package_id + and pv.version = refcount.version + and pv.revision = refcount.revision + where doccount.cnt is null and refcount.cnt is null` + var emptyVersions []entity.PublishedVersionEntity + _, err := d.cp.GetConnection().Query(&emptyVersions, selectEmptyVersionsQuery) + if err != nil { + log.Errorf("Failed to read empty versions: %v", err.Error()) + return err + } + for _, ver := range emptyVersions { + deleteFromBuildDebug := "delete from published_version where package_id = '" + ver.PackageId + "' and version='" + ver.Version + "' and revision=" + strconv.Itoa(ver.Revision) + _, err = d.cp.GetConnection().Exec(deleteFromBuildDebug) + if err != nil { + return err + } 
+ } + return nil +} + +func (d dbMigrationServiceImpl) cleanForRebuild(packageIds []string, versions []string, buildType string) error { + deleteQuery := "delete from migrated_version where 1=1 " + + if len(packageIds) > 0 { + var wherePackageIn = " and package_id in (" + for i, pkg := range packageIds { + if i > 0 { + wherePackageIn += "," + } + wherePackageIn += fmt.Sprintf("'%s'", pkg) + } + wherePackageIn += ") " + + deleteQuery += wherePackageIn + } + + var whereVersionIn string + if len(versions) > 0 { + whereVersionIn = " and version in (" + for i, ver := range versions { + if i > 0 { + whereVersionIn += "," + } + verSplit := strings.Split(ver, "@") + whereVersionIn += fmt.Sprintf("'%s'", verSplit[0]) + } + whereVersionIn += ") " + deleteQuery += whereVersionIn + } + + if buildType != "" { + deleteQuery += fmt.Sprintf(" and build_type = '%s'", buildType) + } + + _, err := d.cp.GetConnection().Exec(deleteQuery) + if err != nil { + return err + } + + // deleteFromBuildDebug := `delete from build where created_by = 'db migration'` + // _, err = d.cp.GetConnection().Exec(deleteFromBuildDebug) + // if err != nil { + // return err + // } + + return nil +} + +func (d dbMigrationServiceImpl) createMigrationTables() error { + versionMigrationTable := ` + create table if not exists migrated_version ( + package_id varchar, + version varchar, + revision int, + error varchar, + build_id varchar, + migration_id varchar, + build_type varchar, + no_changelog bool + ); + + alter table migrated_version add column if not exists build_type varchar; + alter table migrated_version add column if not exists no_changelog bool; + + create table if not exists migration_run ( + id varchar, + started_at timestamp without time zone, + status varchar, + stage varchar, + package_ids varchar[], + versions varchar[], + is_rebuild bool, + is_rebuild_changelog_only bool, + current_builder_version varchar, + error_details varchar, + finished_at timestamp without time zone, + updated_at 
timestamp without time zone + ); + + alter table migration_run add column if not exists is_rebuild_changelog_only bool; + ` + + _, err := d.cp.GetConnection().Exec(versionMigrationTable) + if err != nil { + return err + } + return nil +} + +var downMigrationFileRegexp = regexp.MustCompile(`^[0-9]+_.+\.down\.sql$`) +var upMigrationFileRegexp = regexp.MustCompile(`^[0-9]+_.+\.up\.sql$`) + +func (d *dbMigrationServiceImpl) getMigrationFilenamesMap() (map[int]string, map[int]string, error) { + folder, err := os.Open(d.migrationsFolder) + if err != nil { + return nil, nil, err + } + defer folder.Close() + fileNames, err := folder.Readdirnames(-1) + if err != nil { + return nil, nil, err + } + upMigrations := make(map[int]string, 0) + downMigrations := make(map[int]string, 0) + maxUpMigrationNumber := -1 + for _, file := range fileNames { + if upMigrationFileRegexp.MatchString(file) { + num, _ := strconv.Atoi(strings.Split(file, `_`)[0]) + if _, exists := upMigrations[num]; exists { + return nil, nil, fmt.Errorf("found duplicate migration number, migration is not possible: %v", file) + } + upMigrations[num] = d.migrationsFolder + "/" + file + if maxUpMigrationNumber < num { + maxUpMigrationNumber = num + } + } + if downMigrationFileRegexp.MatchString(file) { + num, _ := strconv.Atoi(strings.Split(file, `_`)[0]) + if _, exists := downMigrations[num]; exists { + return nil, nil, fmt.Errorf("found duplicate migration number, migration is not possible: %v", file) + } + downMigrations[num] = d.migrationsFolder + "/" + file + } + } + if maxUpMigrationNumber != len(upMigrations) { + return nil, nil, fmt.Errorf("highest migration number (%v) should be equal to a total number of migrations (%v)", maxUpMigrationNumber, len(upMigrations)) + } + for num := range downMigrations { + if _, exists := upMigrations[num]; !exists { + return nil, nil, fmt.Errorf("down migration '%v' doesn't belong to any of up migrations", downMigrations[num]) + } + } + return upMigrations, downMigrations, 
nil +} + +func calculateMigrationHash(migrationNum int, data []byte) string { + return utils.GetEncodedChecksum([]byte(strconv.Itoa(migrationNum)), data) +} diff --git a/qubership-apihub-service/migration/service/QueryUtils.go b/qubership-apihub-service/migration/service/QueryUtils.go new file mode 100644 index 0000000..ceca710 --- /dev/null +++ b/qubership-apihub-service/migration/service/QueryUtils.go @@ -0,0 +1,257 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "strings" + + "github.com/go-pg/pg/v10" + + log "github.com/sirupsen/logrus" +) + +func makeLatestIndependentVersionsQuery(packageIds []string, versionsIn []string) string { + var wherePackageIn string + if len(packageIds) > 0 { + wherePackageIn = " and package_id in (" + for i, pkg := range packageIds { + if i > 0 { + wherePackageIn += "," + } + wherePackageIn += fmt.Sprintf("'%s'", pkg) + } + wherePackageIn += ") " + } + + var whereVersionIn string + if len(versionsIn) > 0 { + whereVersionIn = " and version in (" + for i, ver := range versionsIn { + if i > 0 { + whereVersionIn += "," + } + verSplit := strings.Split(ver, "@") + whereVersionIn += fmt.Sprintf("'%s'", verSplit[0]) + } + whereVersionIn += ") " + } + + getLatestIndependentVersionsQuery := ` + with maxrev as ( + select package_id, version, max(revision) as revision + from published_version where deleted_at is null ` + + if wherePackageIn != "" { + getLatestIndependentVersionsQuery += wherePackageIn + } + if whereVersionIn != "" { + getLatestIndependentVersionsQuery += whereVersionIn + } + + getLatestIndependentVersionsQuery += + ` group by package_id, version + ) + select pv.* from + published_version pv + inner join maxrev + on pv.package_id = maxrev.package_id + and pv.version = maxrev.version + and pv.revision = maxrev.revision + and pv.deleted_at is null + inner join package_group pkg on pv.package_id = pkg.id + where + (pv.previous_version is null + or ( + exists( + select 1 from published_version ppv inner join package_group ppg on + (CASE WHEN (pv.previous_version_package_id IS NULL OR pv.previous_version_package_id = '') THEN pv.package_id ELSE pv.previous_version_package_id END) = ppg.id + where pv.previous_version=ppv.version and (ppv.deleted_at is not null or ppg.deleted_at is not null) + ) + and exists( + select 1 from migrated_version mv + inner join maxrev + on (CASE WHEN (pv.previous_version_package_id IS NULL OR pv.previous_version_package_id = 
'') THEN pv.package_id ELSE pv.previous_version_package_id END) = maxrev.package_id + and pv.previous_version = maxrev.version + where mv.version = pv.previous_version + and mv.revision = maxrev.revision /* prev version max rev */ + and mv.build_type = 'build' + and mv.error is null + and (CASE WHEN (pv.previous_version_package_id IS NULL OR pv.previous_version_package_id = '') THEN pv.package_id ELSE pv.previous_version_package_id END) = mv.package_id + ) + ) + ) + and not exists( + select 1 from migrated_version mv + where mv.version = pv.version + and mv.package_id = pv.package_id + and mv.revision = pv.revision + and mv.build_type = 'build' + ) and pkg.deleted_at is null + ` + return getLatestIndependentVersionsQuery +} + +func makeNotLatestVersionsQuery(packageIds []string, versionsIn []string) string { + var wherePackageIn string + if len(packageIds) > 0 { + wherePackageIn = " and package_id in (" + for i, pkg := range packageIds { + if i > 0 { + wherePackageIn += "," + } + wherePackageIn += fmt.Sprintf("'%s'", pkg) + } + wherePackageIn += ") " + } + + var whereVersionIn string + if len(versionsIn) > 0 { + whereVersionIn = " and version in (" + for i, ver := range versionsIn { + if i > 0 { + whereVersionIn += "," + } + verSplit := strings.Split(ver, "@") + whereVersionIn += fmt.Sprintf("'%s'", verSplit[0]) + } + whereVersionIn += ") " + } + + getNotLatestVersionsQuery := ` + with maxrev as ( + select package_id, version, max(revision) as revision + from published_version where deleted_at is null ` + + if wherePackageIn != "" { + getNotLatestVersionsQuery += wherePackageIn + log.Infof("wherePackageIn=%s", wherePackageIn) + } + if whereVersionIn != "" { + getNotLatestVersionsQuery += whereVersionIn + log.Infof("whereVersionIn=%s", whereVersionIn) + } + + getNotLatestVersionsQuery += + ` group by package_id, version + ) + select pv.* from + published_version pv + inner join package_group pkg on pv.package_id = pkg.id + where pv.deleted_at is null + and not 
exists( + select 1 from migrated_version mv + where mv.version = pv.version + and mv.package_id = pv.package_id + and mv.revision = pv.revision + and mv.build_type = 'build' + ) + and exists( + select 1 from migrated_version mv + inner join maxrev + on (CASE WHEN (pv.previous_version_package_id IS NULL OR pv.previous_version_package_id = '') THEN pv.package_id ELSE pv.previous_version_package_id END) = maxrev.package_id + and pv.previous_version = maxrev.version + where mv.version = pv.previous_version + and mv.revision = maxrev.revision /* prev version max rev */ + and mv.build_type = 'build' + and mv.error is null + and (CASE WHEN (pv.previous_version_package_id IS NULL OR pv.previous_version_package_id = '') THEN pv.package_id ELSE pv.previous_version_package_id END) = mv.package_id + ) + and pkg.deleted_at is null` + + return getNotLatestVersionsQuery +} + +func makeAllChangelogForMigrationQuery(packageIds, versions []string) string { + query := `SELECT distinct package_id, version, revision, previous_package_id, previous_version, previous_revision + FROM version_comparison + WHERE package_id != '' + and version != '' and previous_version != ''` + if len(packageIds) != 0 || len(versions) != 0 { + var wherePackageIn string + if len(packageIds) > 0 { + wherePackageIn = " and package_id in (" + for i, pkg := range packageIds { + if i > 0 { + wherePackageIn += "," + } + wherePackageIn += fmt.Sprintf("'%s'", pkg) + } + wherePackageIn += ") " + } + + var whereVersionIn string + if len(versions) > 0 { + whereVersionIn = " and version in (" + for i, ver := range versions { + if i > 0 { + whereVersionIn += "," + } + verSplit := strings.Split(ver, "@") + whereVersionIn += fmt.Sprintf("'%s'", verSplit[0]) + } + whereVersionIn += ") " + } + if wherePackageIn != "" { + query += wherePackageIn + log.Infof("wherePackageIn=%s", wherePackageIn) + } + if whereVersionIn != "" { + query += whereVersionIn + log.Infof("whereVersionIn=%s", whereVersionIn) + } + } + query += " order 
by package_id, version, revision, previous_package_id, previous_version, previous_revision" + return query +} + +func makeChangelogByMigratedVersionQuery(migrationId string) string { + query := `SELECT distinct c.package_id, c.version, c.revision, c.previous_package_id, c.previous_version, c.previous_revision FROM version_comparison as c + inner join migrated_version mv + on c.package_id = mv.package_id + and c.version = mv.version + and c.revision = mv.revision + and mv.build_type = 'build' + and mv.no_changelog = true + where c.version != '' and c.previous_version != ''` + + if migrationId != "" { + query += fmt.Sprintf(" and mv.migration_id = '%s'", migrationId) + } + + query += ` order by c.package_id,c.version,c.revision,c.previous_package_id,c.previous_version,c.previous_revision` + return query +} + +const retryAttemptsCount = 5 + +func queryWithRetry(conn *pg.DB, model, query interface{}, params ...interface{}) (pg.Result, error) { + var err error + var result pg.Result + attempts := retryAttemptsCount + for attempts >= 0 { + result, err = conn.Query(model, query, params) + if err != nil { + if strings.Contains(err.Error(), "connection pool timeout") { + attempts-- + continue + } else { + return result, err + } + } + return result, nil + } + return nil, fmt.Errorf("queryWithRetry: %d attempts failed: %w", retryAttemptsCount, err) +} diff --git a/qubership-apihub-service/migration/service/SoftMigrations.go b/qubership-apihub-service/migration/service/SoftMigrations.go new file mode 100644 index 0000000..d30f7c2 --- /dev/null +++ b/qubership-apihub-service/migration/service/SoftMigrations.go @@ -0,0 +1,407 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "crypto/sha1" + "encoding/hex" + "fmt" + "strconv" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/go-pg/pg/v10" + "github.com/gosimple/slug" + log "github.com/sirupsen/logrus" +) + +const typeAndTitleMigrationVersion = 28 +const searchTablesMigrationVersion = 35 +const filesOperationsMigrationVersion = 57 +const groupToDashboardVersion = 100 +const personalWorkspaces = 102 +const draftBlobIds = 133 + +// SoftMigrateDb The function implements migrations that can't be made via SQL query. +// Executes only required migrations based on current vs new versions. 
+func (d dbMigrationServiceImpl) SoftMigrateDb(currentVersion int, newVersion int, migrationRequired bool) error { + if (currentVersion < typeAndTitleMigrationVersion && typeAndTitleMigrationVersion <= newVersion) || + (migrationRequired && typeAndTitleMigrationVersion == currentVersion && typeAndTitleMigrationVersion == newVersion) { + //async migration because it could take a lot of time to execute + utils.SafeAsync(func() { + err := d.updateTypeAndTitleForPublishedFiles() + if err != nil { + log.Error(err) + } + }) + } + if (currentVersion < searchTablesMigrationVersion && searchTablesMigrationVersion <= newVersion) || + (migrationRequired && searchTablesMigrationVersion == currentVersion && searchTablesMigrationVersion == newVersion) { + //async migration because it could take a lot of time to execute + utils.SafeAsync(func() { + err := d.fillTextSearchTables() + if err != nil { + log.Error(err) + } + }) + } + + if currentVersion < groupToDashboardVersion && groupToDashboardVersion <= newVersion || + (migrationRequired && groupToDashboardVersion == currentVersion && groupToDashboardVersion == newVersion) { + //async migration because it could take a lot of time to execute + utils.SafeAsync(func() { + err := d.groupsToDashboards() + if err != nil { + log.Error(err) + } + }) + } + + if currentVersion < personalWorkspaces && personalWorkspaces <= newVersion || + (migrationRequired && personalWorkspaces == currentVersion && personalWorkspaces == newVersion) { + //async migration because it could take a lot of time to execute + utils.SafeAsync(func() { + err := d.generatePersonalWorkspaceIds() + if err != nil { + log.Error(err) + } + }) + } + if currentVersion < draftBlobIds && draftBlobIds <= newVersion || + (migrationRequired && draftBlobIds == currentVersion && draftBlobIds == newVersion) { + //async migration because it could take a lot of time to execute + utils.SafeAsync(func() { + err := d.calculateDraftBlobIds() + if err != nil { + log.Error(err) + } + }) + 
} + + return nil +} + +func (d dbMigrationServiceImpl) updateTypeAndTitleForPublishedFiles() error { + err := d.validateMinRequiredVersion(typeAndTitleMigrationVersion) + if err != nil { + return err + } + allPublishedFiles := make([]entity.PublishedContentEntity, 0) + err = d.cp.GetConnection().Model(&allPublishedFiles). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return err + } + total := len(allPublishedFiles) + log.Infof("start calculating type and title for published files") + for i, fileEnt := range allPublishedFiles { + log.Infof("[%v / %v] calculating type and title..", i, total) + fileType, title := d.getTypeAndTitleFromPublishedFileData(fileEnt.Name, fileEnt.Checksum) + if title == "" { + title = getTitleFromFilename(fileEnt.Name) + } + fileEntTmp := fileEnt + fileEntTmp.Title = title + fileEntTmp.DataType = fileType + _, err = d.cp.GetConnection().Model(&fileEntTmp). + Column("title", "data_type"). + WherePK(). + Update() + if err != nil { + log.Errorf("failed to calculate type or title for file %v with checksum %v: %v", fileEnt.FileId, fileEnt.Checksum, err.Error()) + } + } + log.Infof("finished calculating type and title for published files") + return nil +} + +func (d dbMigrationServiceImpl) fillTextSearchTables() error { + err := d.validateMinRequiredVersion(searchTablesMigrationVersion) + if err != nil { + return err + } + + var filesToCalculate []entity.PublishedContentEntity + + //select files from not deleted versions that have no entries in any of ts_ tables + filesWithoutTextSearchDataQuery := ` + with maxrev as + ( + select package_id, version, max(revision) as revision + from published_version + group by package_id, version + ) + select vc.* from published_version_revision_content vc + inner join maxrev + on maxrev.package_id = vc.package_id + and maxrev.version = vc.version + and maxrev.revision = vc.revision + inner join published_version pv + on pv.package_id = vc.package_id + and pv.version = vc.version + 
and pv.revision = vc.revision + and pv.deleted_at is null + where vc.data_type != 'unknown' + and vc.checksum not in( + select t.checksum from ts_published_data_path_split t where t.package_id = vc.package_id + union + select t.checksum from ts_published_data_custom_split t where t.package_id = vc.package_id + union + select t.checksum from ts_published_data_errors t where t.package_id = vc.package_id + );` + _, err = d.cp.GetConnection().Query(&filesToCalculate, filesWithoutTextSearchDataQuery) + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return err + } + //splits each lexem by / + //(e.g. lexem /api-v1/customerTroubleTicket_final will be split into 2 lexems: api-v1, customerTroubleTicket_final) + insertTextSearchPathSplitQuery := ` + insert into ts_published_data_path_split(package_id, checksum, search_vector) + select package_id, checksum, to_tsvector(replace(convert_from(data, 'UTF-8'),'/',' ')) as search_vector + from published_data + where package_id = ? and checksum = ? + on conflict (package_id, checksum) do update set search_vector = excluded.search_vector;` + + //splits each lexem by / and then by '-', '_' and capital letters + //(e.g. lexem /api-v1/customerTroubleTicket_final will be split into 6 lexems: api, v1, customer, trouble, ticket, final) + insertTextSearchCustomSplitQuery := ` + insert into ts_published_data_custom_split(package_id, checksum, search_vector) + select package_id, checksum, to_tsvector(regexp_replace(replace(replace(replace(convert_from(data, 'UTF-8'),'/',' '),'-',' '),'_',' '), '([A-Z])', ' \1', 'g')) as search_vector + from published_data + where package_id = ? and checksum = ? + on conflict (package_id, checksum) do update set search_vector = excluded.search_vector;` + + insertTextSearchErrorQuery := ` + insert into ts_published_data_errors(package_id, checksum, error) + values (?, ?, ?) 
+ on conflict (package_id, checksum) do update set error = excluded.error;` + + total := len(filesToCalculate) + log.Infof("start filling text search tables") + for i, file := range filesToCalculate { + log.Infof("[%v / %v] calculating lexems..", i, total) + _, err := d.cp.GetConnection().Exec(insertTextSearchPathSplitQuery, file.PackageId, file.Checksum) + if err != nil { + log.Warnf("Failed to store text search path split vector for '%v' in version %v: %v", file.FileId, file.Version, err.Error()) + _, err = d.cp.GetConnection().Exec(insertTextSearchErrorQuery, file.PackageId, file.Checksum, err.Error()) + if err != nil { + log.Errorf("Failed to store error for '%v' in version %v: %v", file.FileId, file.Version, err.Error()) + continue + } + } + _, err = d.cp.GetConnection().Exec(insertTextSearchCustomSplitQuery, file.PackageId, file.Checksum) + if err != nil { + log.Warnf("Failed to store text search custom split vector for '%v' in version %v: %v", file.FileId, file.Version, err.Error()) + _, err = d.cp.GetConnection().Exec(insertTextSearchErrorQuery, file.PackageId, file.Checksum, err.Error()) + if err != nil { + log.Errorf("Failed to store error for '%v' in version %v: %v", file.FileId, file.Version, err.Error()) + continue + } + } + } + log.Infof("finished filling text search tables") + return nil +} + +// Actually it's custom code for agent +func (d dbMigrationServiceImpl) groupsToDashboards() error { + selectNamespaceGroupsQuery := "select distinct pv.package_id from published_version pv inner join package_group pg on pv.package_id=pg.id where pv.package_id ilike 'QS.RUNENV.%' and pg.kind='group'" + + nsGroupIds := make([]string, 0) + + _, err := d.cp.GetConnection().Query(&nsGroupIds, selectNamespaceGroupsQuery) + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return err + } + if len(nsGroupIds) == 0 { + return nil + } + + insertDashboardQuery := "insert into package_group (id, kind, name, alias, parent_id, image_url, description, deleted_at, 
created_at, created_by, deleted_by, default_role, default_released_version, service_name, release_version_pattern, exclude_from_search, rest_grouping_prefix) " + + "values (?, 'dashboard', 'snapshot', ?, ?, '', '', null, ?, 'db migration', '', 'viewer', '', '', '', true, '') on conflict(id) do nothing;" + + log.Infof("groupsToDashboards: creating dashboards and moving data") + total := len(nsGroupIds) + for i, id := range nsGroupIds { + if (i+1)%10 == 0 { + log.Infof("groupsToDashboards: processed %d/%d", i+1, total) + } + + dashAlias := "SNAPSHOT-DASH" + dashId := id + "." + dashAlias + _, err := d.cp.GetConnection().Exec(insertDashboardQuery, dashId, dashAlias, id, time.Now()) + if err != nil { + return fmt.Errorf("failed to create snapshot dashboard %s", dashId) + } + + _, err = d.transitionRepository.MoveAllData(id, dashId) + if err != nil { + return fmt.Errorf("failed to move data from group %s to dashboard %s: %w", id, dashId, err) + } + } + log.Infof("groupsToDashboards: done") + return nil +} + +func (d dbMigrationServiceImpl) generatePersonalWorkspaceIds() error { + log.Info("Start generating personal workspace ids") + getAllUsersQuery := `select * from user_data` + userEnts := make([]entity.UserEntity, 0) + _, err := d.cp.GetConnection().Query(&userEnts, getAllUsersQuery) + if err != nil { + return fmt.Errorf("failed to get all users: %w", err) + } + for _, user := range userEnts { + privatePackageId, err := d.generateUserPrivatePackageId(user.Id) + if err != nil { + return fmt.Errorf("failed to generate privatePackageId for user: %w", err) + } + _, err = d.cp.GetConnection().Exec(`update user_data set private_package_id = ? 
where user_id = ?`, privatePackageId, user.Id) + if err != nil { + return fmt.Errorf("failed to update private_package_id for user: %w", err) + } + } + log.Info("Successfully generated personal workspace ids") + return nil +} + +func (d dbMigrationServiceImpl) generateUserPrivatePackageId(userId string) (string, error) { + userIdSlug := slug.Make(userId) + privatePackageId := userIdSlug + privatePackageIdTaken, err := d.privatePackageIdExists(privatePackageId, userId) + if err != nil { + return "", err + } + i := 1 + for privatePackageIdTaken { + privatePackageId = userIdSlug + "-" + strconv.Itoa(i) + privatePackageIdTaken, err = d.privatePackageIdExists(privatePackageId, userId) + if err != nil { + return "", err + } + i++ + } + packageEnt, err := d.getPackageIncludingDeleted(privatePackageId) + if err != nil { + return "", err + } + for packageEnt != nil { + i++ + privatePackageId = userIdSlug + "-" + strconv.Itoa(i) + packageEnt, err = d.getPackageIncludingDeleted(privatePackageId) + if err != nil { + return "", err + } + } + return privatePackageId, nil +} + +func (d dbMigrationServiceImpl) privatePackageIdExists(privatePackageId string, excludedUserId string) (bool, error) { + userEnt := new(entity.UserEntity) + err := d.cp.GetConnection().Model(userEnt). + Where("private_package_id = ?", privatePackageId). + Where("user_id != ?", excludedUserId). + First() + if err != nil { + if err == pg.ErrNoRows { + return false, nil + } + return false, err + } + return userEnt.PrivatePackageId == privatePackageId, nil +} + +func (d dbMigrationServiceImpl) getPackageIncludingDeleted(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := d.cp.GetConnection().Model(result). + Where("id = ?", id). 
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+func (d dbMigrationServiceImpl) calculateDraftBlobIds() error {
+	log.Info("Start calculating blobIds for all draft files")
+	type draftFile struct {
+		ProjectId  string `pg:"project_id"`
+		BranchName string `pg:"branch_name"`
+		FileId     string `pg:"file_id"`
+		Status     string `pg:"status"`
+		Data       string `pg:"data"`
+	}
+	getDraftFiles := `
+	select
+		project_id,
+		branch_name,
+		file_id,
+		status,
+		data
+	from branch_draft_content
+	where coalesce(commit_id, '') != ''
+	and data is not null
+	and coalesce(blob_id, '') = ''
+	order by project_id, branch_name, file_id
+	limit ?;
+	`
+	updateDraftFileBlobId := `
+	update branch_draft_content set blob_id = ?
+	where project_id = ?
+	and branch_name = ?
+	and file_id = ?
+	`
+	limit := 10
+	for {
+		draftFilesWithCommitId := make([]draftFile, 0)
+		_, err := d.cp.GetConnection().Query(&draftFilesWithCommitId, getDraftFiles, limit)
+		if err != nil {
+			return fmt.Errorf("failed to get draft files with commitId: %w", err)
+		}
+		for _, draftFile := range draftFilesWithCommitId {
+			_, err := d.cp.GetConnection().Exec(updateDraftFileBlobId, calculateGitBlobId(draftFile.Data), draftFile.ProjectId, draftFile.BranchName, draftFile.FileId)
+			if err != nil {
+				return fmt.Errorf("failed to update blob_id for draft file %v/%v/%v: %w", draftFile.ProjectId, draftFile.BranchName, draftFile.FileId, err) // fixed copy-pasted error message
+			}
+		}
+		if len(draftFilesWithCommitId) < limit {
+			break
+		}
+	}
+
+	log.Info("Successfully calculated new blobIds")
+	return nil
+}
+
+func calculateGitBlobId(s string) string {
+	p := fmt.Sprintf("blob %d\x00", len(s))
+	h := sha1.New()
+	h.Write([]byte(p))
+	h.Write([]byte(s))
+	return hex.EncodeToString(h.Sum([]byte(nil)))
+}
diff --git a/qubership-apihub-service/migration/view/report.go b/qubership-apihub-service/migration/view/report.go
new file mode 100644
index 0000000..5df5e37
--- /dev/null
+++ b/qubership-apihub-service/migration/view/report.go
@@ -0,0 +1,62 @@
+// Copyright
2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import "time" + +type MigrationReport struct { + Status string `json:"status"` + StartedAt time.Time `json:"startedAt"` + FinishedAt *time.Time `json:"finishedAt,omitempty"` + ElapsedTime string `json:"elapsedTime"` + SuccessBuildsCount int `json:"successBuildsCount,omitempty"` + ErrorBuildsCount int `json:"errorBuildsCount,omitempty"` + SuspiciousBuildsCount int `json:"suspiciousBuildsCount,omitempty"` + ErrorBuilds []MigrationError `json:"errorBuilds,omitempty"` + MigrationChanges []MigrationChange `json:"migrationChanges,omitempty"` +} + +type MigrationError struct { + PackageId string `json:"packageId,omitempty"` + Version string `json:"version,omitempty"` + Revision int `json:"revision,omitempty"` + Error string `json:"error,omitempty"` + BuildId string `json:"buildId"` + BuildType string `json:"buildType,omitempty"` + PreviousVersion string `json:"previousVersion,omitempty"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` +} + +type MigrationChange struct { + ChangedField string `json:"changedField"` + AffectedBuildsCount int `json:"affectedBuildsCount"` + AffectedBuildSample *SuspiciousMigrationBuild `json:"affectedBuildSample,omitempty"` +} + +type SuspiciousMigrationBuild struct { + PackageId string `json:"packageId,omitempty"` + Version string `json:"version,omitempty"` + Revision int 
`json:"revision,omitempty"` + BuildId string `json:"buildId"` + Changes map[string]interface{} `json:"changes"` + BuildType string `json:"buildType"` + PreviousVersion string `json:"previousVersion,omitempty"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` +} + +const MigrationStatusRunning = "running" +const MigrationStatusComplete = "complete" +const MigrationStatusFailed = "failed" +const MigrationStatusCancelled = "cancelled" diff --git a/qubership-apihub-service/migration/view/request.go b/qubership-apihub-service/migration/view/request.go new file mode 100644 index 0000000..b72ad30 --- /dev/null +++ b/qubership-apihub-service/migration/view/request.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type MigrationRequest struct { + PackageIds []string `json:"packageIds,omitempty"` + Versions []string `json:"versions,omitempty"` + Rebuild bool `json:"rebuild,omitempty"` + CurrentBuilderVersion string `json:"currentBuilderVersion,omitempty"` + RebuildChangelogOnly bool `json:"rebuildChangelogOnly"` + SkipValidation bool `json:"skipValidation"` +} diff --git a/qubership-apihub-service/ot/LICENSE b/qubership-apihub-service/ot/LICENSE new file mode 100644 index 0000000..4dd9296 --- /dev/null +++ b/qubership-apihub-service/ot/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2019 Martin Schnabel. All rights reserved. 
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/qubership-apihub-service/ot/README.md b/qubership-apihub-service/ot/README.md
new file mode 100644
index 0000000..5b1da83
--- /dev/null
+++ b/qubership-apihub-service/ot/README.md
@@ -0,0 +1,14 @@
+ot
+==
+
+ot is a minimal operational-transformation library for collaborative editing.
+
+It was extracted from my long abandoned https://github.com/mb0/lab project and modified to operate
+on slices of unicode runes. The original code was based on
+https://github.com/Operational-Transformation/ot.js by Tim Baumann (MIT License).
+
+License
+-------
+
+Copyright (c) 2019 Martin Schnabel. All rights reserved.
+Use of the source code is governed by a BSD-style license that can be found in the LICENSE file.
diff --git a/qubership-apihub-service/ot/client.go b/qubership-apihub-service/ot/client.go new file mode 100644 index 0000000..c967417 --- /dev/null +++ b/qubership-apihub-service/ot/client.go @@ -0,0 +1,79 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ot + +import "fmt" + +// Client represent a client document with synchronization mechanisms. +// The client has three states: +// 1. A synchronized client sends applied ops immediately and … +// 2. waits for an acknowledgement from the server, meanwhile buffering applied ops. +// 3. The buffer is composed with new ops and sent immediately when the pending ack arrives. +type Client struct { + Doc *Doc // the document + Rev int // last acknowledged revision + Wait Ops // pending ops or nil + Buf Ops // buffered ops or nil + // Send is called when a new revision can be sent to the server. + Send func(rev int, ops Ops) +} + +// Apply applies ops to the document and buffers or sends the server update. +// An error is returned if the ops could not be applied. +func (c *Client) Apply(ops Ops) error { + var err error + if err = c.Doc.Apply(ops); err != nil { + return err + } + switch { + case c.Buf != nil: + if c.Buf, err = Compose(c.Buf, ops); err != nil { + return err + } + case c.Wait != nil: + c.Buf = ops + default: + c.Wait = ops + c.Send(c.Rev, ops) + } + return nil +} + +// Ack acknowledges a pending server update and sends buffered updates if any. +// An error is returned if no update is pending. +func (c *Client) Ack() error { + switch { + case c.Buf != nil: + c.Send(c.Rev+1, c.Buf) + c.Wait, c.Buf = c.Buf, nil + case c.Wait != nil: + c.Wait = nil + default: + return fmt.Errorf("no pending operation") + } + c.Rev++ + return nil +} + +// Recv receives server updates originating from other participants. +// An error is returned if the server update could not be applied. 
+func (c *Client) Recv(ops Ops) error { + var err error + if c.Wait != nil { + if ops, c.Wait, err = Transform(ops, c.Wait); err != nil { + return err + } + } + if c.Buf != nil { + if ops, c.Buf, err = Transform(ops, c.Buf); err != nil { + return err + } + } + if err = c.Doc.Apply(ops); err != nil { + return err + } + c.Rev++ + return nil +} diff --git a/qubership-apihub-service/ot/client_test.go b/qubership-apihub-service/ot/client_test.go new file mode 100644 index 0000000..e37f945 --- /dev/null +++ b/qubership-apihub-service/ot/client_test.go @@ -0,0 +1,76 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ot + +import "testing" + +func TestClient(t *testing.T) { + var sent []Ops + doc := NewDocFromStr("old!") + c := &Client{Doc: doc, Send: func(rev int, ops Ops) { + sent = append(sent, ops) + }} + a := Ops{{S: "g"}, {N: 4}} + err := c.Apply(a) + if err != nil { + t.Error(err) + } + if s := doc.String(); s != "gold!" { + t.Errorf(`expected "gold!" got %q`, s) + } + if !a.Equal(c.Wait) || !a.Equal(sent[0]) { + t.Error("expected waiting for ack") + } + b := Ops{{N: 2}, {N: -2}, {N: 1}} + err = c.Apply(b) + if err != nil { + t.Error(err) + } + if s := doc.String(); s != "go!" { + t.Errorf(`expected "go!" got %q`, s) + } + if !b.Equal(c.Buf) || len(sent) != 1 { + t.Error("expected buffering") + } + err = c.Apply(Ops{{N: 2}, {S: " cool"}, {N: 1}}) + if err != nil { + t.Error(err) + } + if s := doc.String(); s != "go cool!" { + t.Errorf(`expected "go cool!" got %q`, s) + } + cb := Ops{{N: 2}, {N: -2}, {S: " cool"}, {N: 1}} + if !cb.Equal(c.Buf) || len(sent) != 1 { + t.Error("expected combinig buffer") + } + err = c.Recv(Ops{{N: 1}, {S: " is"}, {N: 3}}) + if err != nil { + t.Error(err) + } + if s := doc.String(); s != "go is cool!" { + t.Errorf(`expected "go is cool!" 
got %q`, s) + } + if !c.Wait.Equal(Ops{{S: "g"}, {N: 7}}) { + t.Error("expected transform wait", c.Wait) + } + cb = Ops{{N: 5}, {N: -2}, {S: " cool"}, {N: 1}} + if !c.Buf.Equal(cb) { + t.Error("expected transform buf", c.Buf) + } + err = c.Ack() + if err != nil { + t.Error(err) + } + if c.Buf != nil || !cb.Equal(c.Wait) || len(sent) != 2 || !cb.Equal(sent[1]) { + t.Error("expected flushing buffer") + } + err = c.Ack() + if err != nil { + t.Error(err) + } + if c.Buf != nil || c.Wait != nil || len(sent) != 2 { + t.Error("expected flushed") + } +} diff --git a/qubership-apihub-service/ot/doc.go b/qubership-apihub-service/ot/doc.go new file mode 100644 index 0000000..b719980 --- /dev/null +++ b/qubership-apihub-service/ot/doc.go @@ -0,0 +1,214 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ot + +import ( + "bufio" + "bytes" + "fmt" + "io" + "unicode/utf8" +) + +var Zero Pos + +type Pos struct { + Index int + Line int + Offset int +} + +func (p Pos) Valid() bool { + return p.Line >= 0 && p.Offset >= 0 +} + +// Doc represents a utf8-text document as lines of runes. +// All lines en in an implicit trailing newline. 
+type Doc struct { + Lines [][]rune + Size int +} + +func NewDoc(r io.Reader) (*Doc, error) { + br := bufio.NewReader(r) + var d Doc + var err error + for err == nil { + var data []byte + data, err = br.ReadSlice('\n') + if err == nil { + d.Size += 1 + data = data[:len(data)-1] + } + line := make([]rune, 0, utf8.RuneCount(data)) + for len(data) > 0 { + r, rl := utf8.DecodeRune(data) + if r == utf8.RuneError { + return nil, fmt.Errorf("invalid rune in at %d:%d", + len(d.Lines), len(line)) + } + data = data[rl:] + line = append(line, r) + } + d.Size += len(line) + d.Lines = append(d.Lines, line) + } + if err != nil && err != io.EOF { + return nil, err + } + return &d, nil +} + +func NewDocFromStr(s string) *Doc { + var d Doc + var b int + for i, r := range s { + d.Size++ + if r == '\n' { + d.Lines = append(d.Lines, []rune(s[b:i])) + b = i + 1 + } + } + d.Lines = append(d.Lines, []rune(s[b:])) + return &d +} + +func (doc *Doc) Pos(index int, last Pos) Pos { + n := index - last.Index + last.Offset + for i, l := range doc.Lines[last.Line:] { + if len(l) >= n { + return Pos{index, i + last.Line, n} + } + n -= len(l) + 1 + } + return Pos{index, -1, -1} +} + +type posop struct { + Pos + Op +} + +// Apply applies the operation sequence ops to the document. +// An error is returned if applying ops failed. 
+func (doc *Doc) Apply(ops Ops) error { + d := doc.Lines + p, pops := Zero, make([]posop, 0, len(ops)) + for _, op := range ops { + switch { + case op.N > 0: + p = doc.Pos(p.Index+op.N, p) + if !p.Valid() { + return fmt.Errorf("invalid document index %d", p.Index) + } + case op.N < 0: + pops = append(pops, posop{p, op}) + p = doc.Pos(p.Index-op.N, p) + if !p.Valid() { + return fmt.Errorf("invalid document index %d", p.Index) + } + case op.S != "": + pops = append(pops, posop{p, op}) + } + } + /*if p.Line != len(d)-1 || p.Offset != len(d[p.Line]) { + return fmt.Errorf("operation didn't operate on the whole document") + }*/ + for i := len(pops) - 1; i >= 0; i-- { + pop := pops[i] + switch { + case pop.N < 0: + doc.Size += pop.N + end := doc.Pos(pop.Index-pop.N, pop.Pos) + if !end.Valid() { + return fmt.Errorf("invalid document index %d", end.Index) + } + line := d[pop.Line] + if pop.Line == end.Line { + rest := line[end.Offset:] + d[pop.Line] = append(line[:pop.Offset], rest...) + break + } + rest := d[end.Line][end.Offset:] + d[pop.Line] = append(line[:pop.Offset], rest...) + d = append(d[:pop.Line+1], d[end.Line+1:]...) + case pop.S != "": + insd := NewDocFromStr(pop.S) + doc.Size += insd.Size + insl := insd.Lines + line := d[pop.Line] + last := len(insl) - 1 + insl[last] = append(insl[last], line[pop.Offset:]...) + insl[0] = append(line[:pop.Offset], insl[0]...) 
+ if len(insl) == 1 { + d[pop.Line] = insl[0] + break + } + need := len(d) + len(insl) - 1 + if cap(d) < need { + nd := make([][]rune, len(d), need) + copy(nd, d) + d = nd + } + d = d[:need] + copy(d[pop.Line+len(insl):], d[pop.Line+1:]) + copy(d[pop.Line:], insl) + } + } + doc.Lines = d + return nil +} + +func (doc Doc) Extract(from, to int) *Doc { + off := doc.Pos(from, Zero) + end := doc.Pos(to, off) + if off.Line == end.Line { + return &Doc{Lines: [][]rune{ + doc.Lines[off.Line][off.Offset:end.Offset], + }} + } + nd := make([][]rune, 0, 1+end.Line-off.Line) + nd = append(nd, doc.Lines[off.Line][off.Offset:]) + for i := off.Line + 1; i < end.Line; i++ { + nd = append(nd, doc.Lines[i]) + } + nd = append(nd, doc.Lines[end.Line][:end.Offset]) + return &Doc{nd, to - from} +} + +func (doc Doc) WriteTo(w io.Writer) (nn int64, err error) { + rw, ok := w.(runeWriter) + if !ok { + rw = bufio.NewWriter(w) + } + var n int + for i, l := range doc.Lines { + if i > 0 { + n, err = rw.WriteRune('\n') + nn += int64(n) + if err != nil { + return + } + } + for _, r := range l { + n, err = rw.WriteRune(r) + nn += int64(n) + if err != nil { + return + } + } + } + return +} + +func (doc Doc) String() string { + var buf bytes.Buffer + doc.WriteTo(&buf) + return buf.String() +} + +type runeWriter interface { + WriteRune(rune) (int, error) +} diff --git a/qubership-apihub-service/ot/doc_test.go b/qubership-apihub-service/ot/doc_test.go new file mode 100644 index 0000000..a0722f1 --- /dev/null +++ b/qubership-apihub-service/ot/doc_test.go @@ -0,0 +1,56 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ot + +import "testing" + +func TestDocPos(t *testing.T) { + doc := NewDocFromStr("abc") + off := doc.Pos(3, Zero) + if !off.Valid() { + t.Error("eof is not valid") + } +} + +func TestDocApply(t *testing.T) { + tests := []struct { + text string + want string + ops Ops + }{ + {"abc", "atag", Ops{ + {N: 1}, + {S: "tag"}, + {N: -2}, + }}, + {"abc\ndef", "\nabc\ndef", Ops{ + {S: "\n"}, + {N: 7}, + }}, + {"abc\ndef\nghi", "abcghi", Ops{ + {N: 3}, + {N: -5}, + {N: 3}, + }}, + {"abc\ndef\nghi", "ahoi", Ops{ + {N: 1}, + {N: -3}, + {S: "h"}, + {N: -4}, + {S: "o"}, + {N: -2}, + {N: 1}, + }}, + } + for i, test := range tests { + doc := NewDocFromStr(test.text) + if err := doc.Apply(test.ops); err != nil { + t.Errorf("test %d error: %s", i, err) + } + if got := doc.String(); got != test.want { + t.Errorf("test %d want %q got %q", i, test.want, got) + } + } +} diff --git a/qubership-apihub-service/ot/ops.go b/qubership-apihub-service/ot/ops.go new file mode 100644 index 0000000..2473760 --- /dev/null +++ b/qubership-apihub-service/ot/ops.go @@ -0,0 +1,325 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ot provides operational transformation utilities for utf8 text collaboration. +// +// The code is based on, and in part compatible with +// https://github.com/Operational-Transformation/ot.js by Tim Baumann (MIT License). +package ot + +import ( + "encoding/json" + "fmt" +) + +var noop Op + +// Op represents a single operation. +type Op struct { + // N signifies the operation type: + // > 0: Retain N runes + // < 0: Delete -N runes + // == 0: Noop or Insert string S + N int + S string +} + +// MarshalJSON encodes op either as json number or string. +func (op *Op) MarshalJSON() ([]byte, error) { + if op.N == 0 { + return json.Marshal(op.S) + } + return json.Marshal(op.N) +} + +// UnmarshalJSON decodes a json number or string into op. 
+func (op *Op) UnmarshalJSON(raw []byte) error { + if len(raw) > 0 && raw[0] == '"' { + return json.Unmarshal(raw, &op.S) + } + return json.Unmarshal(raw, &op.N) +} + +// Ops represents a sequence of operations. +type Ops []Op + +// Count returns the number of retained, deleted and inserted runes. +func (ops Ops) Count() (ret, del, ins int) { + for _, op := range ops { + switch { + case op.N > 0: + ret += op.N + case op.N < 0: + del += -op.N + case op.N == 0: + for _ = range op.S { + ins++ + } + } + } + return +} + +// Equal returns if other equals ops. +func (ops Ops) Equal(other Ops) bool { + if len(ops) != len(other) { + return false + } + for i, o := range other { + if o != ops[i] { + return false + } + } + return true +} + +// Merge attempts to merge consecutive operations in place and returns the sequence. +func Merge(ops Ops) Ops { + o, l := -1, len(ops) + for _, op := range ops { + if op == noop { + l-- + continue + } + var last Op + if o > -1 { + last = ops[o] + } + switch { + case last.N == 0 && last.S != "" && op.N == 0: + op.S = last.S + op.S + l-- + case last.N < 0 && op.N < 0, last.N > 0 && op.N > 0: + op.N += last.N + l-- + default: + o += 1 + } + ops[o] = op + } + return ops[:l] +} + +// getop returns the current sequence count and the next valid operation in ops or noop. +func getop(i int, ops Ops) (int, Op) { + for ; i < len(ops); i++ { + op := ops[i] + if op != noop { + return i + 1, op + } + } + return i, noop +} + +// sign return the sign of n. +func sign(n int) int { + switch { + case n < 0: + return -1 + case n > 0: + return 1 + } + return 0 +} + +// Compose returns an operation sequence composed from the consecutive ops a and b. +// An error is returned if the composition failed. 
+func Compose(a, b Ops) (ab Ops, err error) { + if len(a) == 0 || len(b) == 0 { + return + } + reta, _, ins := a.Count() + retb, del, _ := b.Count() + if reta+ins != retb+del { + err = fmt.Errorf("compose requires consecutive ops") + return + } + ia, oa := getop(0, a) + ib, ob := getop(0, b) + for oa != noop || ob != noop { + if oa.N < 0 { // delete a + ab = append(ab, oa) + ia, oa = getop(ia, a) + continue + } + if ob.N == 0 && ob.S != "" { // insert b + ab = append(ab, ob) + ib, ob = getop(ib, b) + continue + } + if oa == noop || ob == noop { + err = fmt.Errorf("compose encountered a short operation sequence") + return + } + switch { + case oa.N > 0 && ob.N > 0: // both retain + switch sign(oa.N - ob.N) { + case 1: + oa.N -= ob.N + ab = append(ab, ob) + ib, ob = getop(ib, b) + case -1: + ob.N -= oa.N + ab = append(ab, oa) + ia, oa = getop(ia, a) + default: + ab = append(ab, oa) + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + case oa.N == 0 && ob.N < 0: // insert delete + switch sign(len(oa.S) + ob.N) { + case 1: + oa = Op{S: string(oa.S[-ob.N:])} + ib, ob = getop(ib, b) + case -1: + ob.N += len(oa.S) + ia, oa = getop(ia, a) + default: + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + case oa.N == 0 && ob.N > 0: // insert retain + switch sign(len(oa.S) - ob.N) { + case 1: + ab = append(ab, Op{S: string(oa.S[:ob.N])}) + oa = Op{S: string(oa.S[ob.N:])} + ib, ob = getop(ib, b) + case -1: + ob.N -= len(oa.S) + ab = append(ab, oa) + ia, oa = getop(ia, a) + default: + ab = append(ab, oa) + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + case oa.N > 0 && ob.N < 0: // retain delete + switch sign(oa.N + ob.N) { + case 1: + oa.N += ob.N + ab = append(ab, ob) + ib, ob = getop(ib, b) + case -1: + ob.N += oa.N + oa.N *= -1 + ab = append(ab, oa) + ia, oa = getop(ia, a) + default: + ab = append(ab, ob) + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + default: + panic("This should never have happened.") + } + } + ab = Merge(ab) + return +} + +// Transform 
returns two operation sequences derived from the concurrent ops a and b. +// An error is returned if the transformation failed. +func Transform(a, b Ops) (a1, b1 Ops, err error) { + if len(a) == 0 || len(b) == 0 { + return + } + reta, dela, _ := a.Count() + retb, delb, _ := b.Count() + if reta+dela != retb+delb { + err = fmt.Errorf("transform requires concurrent ops") + return + } + ia, oa := getop(0, a) + ib, ob := getop(0, b) + for oa != noop || ob != noop { + var om Op + if oa.N == 0 && oa.S != "" { // insert a + om.N = len(oa.S) + a1 = append(a1, oa) + b1 = append(b1, om) + ia, oa = getop(ia, a) + continue + } + if ob.N == 0 && ob.S != "" { // insert b + om.N = len(ob.S) + a1 = append(a1, om) + b1 = append(b1, ob) + ib, ob = getop(ib, b) + continue + } + if oa == noop || ob == noop { + err = fmt.Errorf("transform encountered a short operation sequence") + return + } + switch { + case oa.N > 0 && ob.N > 0: // both retain + switch sign(oa.N - ob.N) { + case 1: + om.N = ob.N + oa.N -= ob.N + ib, ob = getop(ib, b) + case -1: + om.N = oa.N + ob.N -= oa.N + ia, oa = getop(ia, a) + default: + om.N = oa.N + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + a1 = append(a1, om) + b1 = append(b1, om) + case oa.N < 0 && ob.N < 0: // both delete + switch sign(-oa.N + ob.N) { + case 1: + oa.N -= ob.N + ib, ob = getop(ib, b) + case -1: + ob.N -= oa.N + ia, oa = getop(ia, a) + default: + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + case oa.N < 0 && ob.N > 0: // delete, retain + switch sign(-oa.N - ob.N) { + case 1: + om.N = -ob.N + oa.N += ob.N + ib, ob = getop(ib, b) + case -1: + om.N = oa.N + ob.N += oa.N + ia, oa = getop(ia, a) + default: + om.N = oa.N + ia, oa = getop(ia, a) + ib, ob = getop(ib, b) + } + a1 = append(a1, om) + case oa.N > 0 && ob.N < 0: // retain, delete + switch sign(oa.N + ob.N) { + case 1: + om.N = ob.N + oa.N += ob.N + ib, ob = getop(ib, b) + case -1: + om.N = -oa.N + ob.N += oa.N + ia, oa = getop(ia, a) + default: + om.N = -oa.N + ia, oa = 
getop(ia, a) + ib, ob = getop(ib, b) + } + b1 = append(b1, om) + default: + err = fmt.Errorf("transform failed with incompatible operation sequences") + return + } + } + a1, b1 = Merge(a1), Merge(b1) + return +} diff --git a/qubership-apihub-service/ot/ops_test.go b/qubership-apihub-service/ot/ops_test.go new file mode 100644 index 0000000..92702e7 --- /dev/null +++ b/qubership-apihub-service/ot/ops_test.go @@ -0,0 +1,154 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ot + +import ( + "encoding/json" + "testing" +) + +func TestOpsCount(t *testing.T) { + var o Ops + checklen := func(bl, tl int) { + ret, del, ins := o.Count() + if l := ret + del; l != bl { + t.Errorf("base len %d != %d", l, bl) + } + if l := ret + ins; l != tl { + t.Errorf("taget len %d != %d", l, tl) + } + } + checklen(0, 0) + o = append(o, Op{N: 5}) + checklen(5, 5) + o = append(o, Op{S: "abc"}) + checklen(5, 8) + o = append(o, Op{N: 2}) + checklen(7, 10) + o = append(o, Op{N: -2}) + checklen(9, 10) +} + +func TestOpsMerge(t *testing.T) { + o := Ops{ + {N: 5}, {N: 2}, {}, + {S: "lo"}, {S: "rem"}, {}, + {N: -3}, {N: -2}, {}, + } + if mo := Merge(o); len(mo) != 3 { + t.Errorf("got %+v", mo) + } +} + +func TestOpsEqual(t *testing.T) { + var a, b Ops + if !a.Equal(b) || !b.Equal(a) { + t.Errorf("expect equal %v %v", a, b) + } + a = Ops{{N: 7}, {S: "lorem"}, {N: -5}} + if a.Equal(b) || b.Equal(a) { + t.Errorf("expect not equal %v %v", a, b) + } + b = Ops{{N: 7}, {S: "lorem"}, {N: -5}} + if !a.Equal(b) || !b.Equal(a) { + t.Errorf("expect equal %v %v", a, b) + } +} + +func TestOpsEncoding(t *testing.T) { + e := `[7,"lorem",-5]` + o := Ops{{N: 7}, {S: "lorem"}, {N: -5}} + oe, err := json.Marshal(o) + if err != nil { + t.Error(err) + } + if string(oe) != e { + t.Errorf("expected %s got %s", e, oe) + } + var eo Ops + err = json.Unmarshal([]byte(e), &eo) + if err != nil { + 
t.Error(err) + } + if !o.Equal(eo) { + t.Errorf("expected %v got %v", o, eo) + } +} + +var composeTests = []struct { + a, b, ab Ops +}{ + { + a: Ops{{N: 3}, {N: -1}}, + b: Ops{{N: 1}, {S: "tag"}, {N: 2}}, + ab: Ops{{N: 1}, {S: "tag"}, {N: 2}, {N: -1}}, + }, + { + a: Ops{{N: 1}, {S: "tag"}, {N: 2}}, + b: Ops{{N: 4}, {N: -2}}, + ab: Ops{{N: 1}, {S: "tag"}, {N: -2}}, + }, + { + a: Ops{{N: 1}, {S: "tag"}}, + b: Ops{{N: 2}, {N: -1}, {N: 1}}, + ab: Ops{{N: 1}, {S: "tg"}}, + }, +} + +func TestOpsCompose(t *testing.T) { + for _, c := range composeTests { + ab, err := Compose(c.a, c.b) + if err != nil { + t.Error(err) + } + if !ab.Equal(c.ab) { + t.Errorf("expected %v got %v", c.ab, ab) + } + } +} + +var transformTests = []struct { + a, b, a1, b1 Ops +}{ + { + a: Ops{{N: 1}, {S: "tag"}, {N: 2}}, + b: Ops{{N: 2}, {N: -1}}, + a1: Ops{{N: 1}, {S: "tag"}, {N: 1}}, + b1: Ops{{N: 5}, {N: -1}}, + }, + { + a: Ops{{N: 1}, {S: "tag"}, {N: 2}}, + b: Ops{{N: 1}, {S: "tag"}, {N: 2}}, + a1: Ops{{N: 1}, {S: "tag"}, {N: 5}}, + b1: Ops{{N: 4}, {S: "tag"}, {N: 2}}, + }, + { + a: Ops{{N: 1}, {N: -2}}, + b: Ops{{N: 2}, {N: -1}}, + a1: Ops{{N: 1}, {N: -1}}, + b1: Ops{{N: 1}}, + }, + { + a: Ops{{N: 2}, {N: -1}}, + b: Ops{{N: 1}, {N: -2}}, + a1: Ops{{N: 1}}, + b1: Ops{{N: 1}, {N: -1}}, + }, +} + +func TestOpsTransform(t *testing.T) { + for _, c := range transformTests { + a1, b1, err := Transform(c.a, c.b) + if err != nil { + t.Error(err) + } + if !a1.Equal(c.a1) { + t.Errorf("expected %v got %v", c.a1, a1) + } + if !b1.Equal(c.b1) { + t.Errorf("expected %v got %v", c.b1, b1) + } + } +} diff --git a/qubership-apihub-service/ot/server.go b/qubership-apihub-service/ot/server.go new file mode 100644 index 0000000..bea4207 --- /dev/null +++ b/qubership-apihub-service/ot/server.go @@ -0,0 +1,39 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ot + +import "fmt" + +// ServerDoc represents shared document with revision history. +type ServerDoc struct { + Doc *Doc + History []Ops +} + +// Recv transforms, applies, and returns client ops and its revision. +// An error is returned if the ops could not be applied. +// Sending the derived ops to connected clients is the caller's responsibility. +func (s *ServerDoc) Recv(rev int, ops Ops) (Ops, error) { + if rev < 0 || len(s.History) < rev { + return nil, fmt.Errorf("Revision not in history") + } + var err error + // transform ops against all operations that happened since rev + for _, other := range s.History[rev:] { + if ops, _, err = Transform(ops, other); err != nil { + return nil, err + } + } + // apply to document + if err = s.Doc.Apply(ops); err != nil { + return nil, err + } + s.History = append(s.History, ops) + return ops, nil +} + +func (s *ServerDoc) Rev() int { + return len(s.History) +} diff --git a/qubership-apihub-service/ot/server_test.go b/qubership-apihub-service/ot/server_test.go new file mode 100644 index 0000000..258192e --- /dev/null +++ b/qubership-apihub-service/ot/server_test.go @@ -0,0 +1,31 @@ +// Copyright 2014 Martin Schnabel. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ot + +import "testing" + +func TestServer(t *testing.T) { + doc := NewDocFromStr("abc") + s := &ServerDoc{doc, nil} + _, err := s.Recv(1, Ops{}) + if err == nil || s.Rev() != 0 { + t.Error("expected error") + } + a := Ops{{N: 1}, {S: "tag"}, {N: 2}} + a1, err := s.Recv(0, a) + if err != nil || s.Rev() != 1 { + t.Error(err) + } + if !a1.Equal(a) { + t.Errorf("expected %v got %v", a, a1) + } + b1, err := s.Recv(0, Ops{{N: 1}, {N: -2}}) + if err != nil || s.Rev() != 2 { + t.Error(err) + } + if !b1.Equal(Ops{{N: 4}, {N: -2}}) { + t.Errorf("expected %v got %v", a, a1) + } +} diff --git a/qubership-apihub-service/repository/ActivityTrackingRepository.go b/qubership-apihub-service/repository/ActivityTrackingRepository.go new file mode 100644 index 0000000..f34e5a9 --- /dev/null +++ b/qubership-apihub-service/repository/ActivityTrackingRepository.go @@ -0,0 +1,124 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/go-pg/pg/v10" + "github.com/go-pg/pg/v10/orm" +) + +type ActivityTrackingRepository interface { + CreateEvent(ent *entity.ActivityTrackingEntity) error + + GetEventsForPackages_deprecated(packageIds []string, limit int, page int, textFilter string, types []string) ([]entity.EnrichedActivityTrackingEntity_deprecated, error) + GetEventsForPackages(packageIds []string, limit int, page int, textFilter string, types []string) ([]entity.EnrichedActivityTrackingEntity, error) +} + +func NewActivityTrackingRepository(cp db.ConnectionProvider) ActivityTrackingRepository { + return &activityTrackingRepositoryImpl{ + cp: cp, + } +} + +type activityTrackingRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (a activityTrackingRepositoryImpl) CreateEvent(ent *entity.ActivityTrackingEntity) error { + _, err := a.cp.GetConnection().Model(ent).Insert() + if err != nil { + return err + } + return nil +} + +func (a activityTrackingRepositoryImpl) GetEventsForPackages_deprecated(packageIds []string, limit int, page int, textFilter string, types []string) ([]entity.EnrichedActivityTrackingEntity_deprecated, error) { + var result []entity.EnrichedActivityTrackingEntity_deprecated + + query := a.cp.GetConnection().Model(&result). + ColumnExpr("at.*").ColumnExpr("get_latest_revision(at.package_id, at.data #>> '{version}') != (at.data #>> '{revision}')::int as not_latest_revision").ColumnExpr("pkg.name as pkg_name, pkg.kind as pkg_kind, usr.name as usr_name"). + Join("inner join package_group as pkg").JoinOn("at.package_id=pkg.id"). 
+ Join("inner join user_data as usr").JoinOn("at.user_id=usr.user_id") + + if len(packageIds) > 0 { + query.Where("at.package_id in (?)", pg.In(packageIds)) + } + if len(types) > 0 { + query.Where("at.e_type in (?)", pg.In(types)) + } + if textFilter != "" { + textFilter = "%" + utils.LikeEscaped(textFilter) + "%" + query.WhereGroup(func(query *orm.Query) (*orm.Query, error) { + return query.Where("pkg.name ilike ?", textFilter).WhereOr("usr.name ilike ?", textFilter), nil + }) + } + query.Order("date DESC").Limit(limit).Offset(limit * page) + + err := query.Select() + if err != nil { + return nil, err + } + + if err != nil { + if err != pg.ErrNoRows { + return nil, err + } + } + return result, nil +} +func (a activityTrackingRepositoryImpl) GetEventsForPackages(packageIds []string, limit int, page int, textFilter string, types []string) ([]entity.EnrichedActivityTrackingEntity, error) { + var result []entity.EnrichedActivityTrackingEntity + + query := a.cp.GetConnection().Model(&result). + ColumnExpr("at.*"). + ColumnExpr("get_latest_revision(at.package_id, at.data #>> '{version}') != (at.data #>> '{revision}')::int as not_latest_revision"). + ColumnExpr("pkg.name as pkg_name, pkg.kind as pkg_kind"). + ColumnExpr("usr.name as prl_usr_name, usr.email as prl_usr_email, usr.avatar_url as prl_usr_avatar_url"). + ColumnExpr("apikey.id as prl_apikey_id, apikey.name as prl_apikey_name"). + ColumnExpr("case when coalesce(usr.name, apikey.name) is null then at.user_id else usr.user_id end prl_usr_id"). + Join("inner join package_group as pkg").JoinOn("at.package_id=pkg.id"). + Join("left join user_data as usr").JoinOn("at.user_id=usr.user_id"). 
+ Join("left join apihub_api_keys as apikey").JoinOn("at.user_id=apikey.id") + + if len(packageIds) > 0 { + query.Where("at.package_id in (?)", pg.In(packageIds)) + } + if len(types) > 0 { + query.Where("at.e_type in (?)", pg.In(types)) + } + if textFilter != "" { + textFilter = "%" + utils.LikeEscaped(textFilter) + "%" + query.WhereGroup(func(query *orm.Query) (*orm.Query, error) { + //TODO: Check if usr is empty bbecause of apikey:: is it corrrect? + return query.Where("pkg.name ilike ?", textFilter).WhereOr("usr.name ilike ?", textFilter), nil + }) + } + query.Order("date DESC").Limit(limit).Offset(limit * page) + + err := query.Select() + if err != nil { + return nil, err + } + + if err != nil { + if err != pg.ErrNoRows { + return nil, err + } + } + return result, nil +} diff --git a/qubership-apihub-service/repository/AgentRepository.go b/qubership-apihub-service/repository/AgentRepository.go new file mode 100644 index 0000000..9deb5fc --- /dev/null +++ b/qubership-apihub-service/repository/AgentRepository.go @@ -0,0 +1,72 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/go-pg/pg/v10" +) + +type AgentRepository interface { + CreateOrUpdateAgent(ent entity.AgentEntity) error + ListAgents(onlyActive bool) ([]entity.AgentEntity, error) + GetAgent(id string) (*entity.AgentEntity, error) +} + +func NewAgentRepository(cp db.ConnectionProvider) AgentRepository { + return agentRepositoryImpl{cp: cp} +} + +type agentRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (a agentRepositoryImpl) CreateOrUpdateAgent(ent entity.AgentEntity) error { + _, err := a.cp.GetConnection().Model(&ent).OnConflict("(agent_id) DO UPDATE").Insert() + if err != nil { + return err + } + return nil +} + +func (a agentRepositoryImpl) ListAgents(onlyActive bool) ([]entity.AgentEntity, error) { + var result []entity.AgentEntity + query := a.cp.GetConnection().Model(&result) + if onlyActive { + query.Where("last_active > (now() - interval '30 seconds')") + } + query.Order("agent_id ASC") + + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (a agentRepositoryImpl) GetAgent(id string) (*entity.AgentEntity, error) { + result := new(entity.AgentEntity) + err := a.cp.GetConnection().Model(result). + Where("agent_id = ?", id). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} diff --git a/qubership-apihub-service/repository/ApihubApiKeyRepository.go b/qubership-apihub-service/repository/ApihubApiKeyRepository.go new file mode 100644 index 0000000..1e3ad31 --- /dev/null +++ b/qubership-apihub-service/repository/ApihubApiKeyRepository.go @@ -0,0 +1,32 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package repository

import (
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
)

// ApihubApiKeyRepository stores and retrieves APIHUB API keys and their
// package associations. Methods with a _deprecated suffix serve older API
// versions and return the corresponding deprecated entity shapes.
type ApihubApiKeyRepository interface {
	// Deprecated: use SaveApiKey instead.
	SaveApiKey_deprecated(apihubApiKeyEntity *entity.ApihubApiKeyEntity_deprecated) error
	SaveApiKey(apihubApiKeyEntity *entity.ApihubApiKeyEntity) error
	// RevokeApiKey soft-deletes the key, recording the revoking user.
	RevokeApiKey(id string, userId string) error
	// Deprecated: use GetPackageApiKeys instead.
	GetPackageApiKeys_deprecated(packageId string) ([]entity.ApihubApiKeyEntity_deprecated, error)
	// Deprecated: use GetPackageApiKeys instead.
	GetPackageApiKeys_v3_deprecated(packageId string) ([]entity.ApihubApiKeyUserEntity_deprecated, error)
	GetPackageApiKeys(packageId string) ([]entity.ApihubApiKeyUserEntity, error)
	// GetApiKeyByHash looks a key up by its stored hash (used for
	// authentication).
	GetApiKeyByHash(apiKeyHash string) (*entity.ApihubApiKeyEntity, error)
	// Deprecated: use GetPackageApiKey instead.
	GetPackageApiKey_deprecated(apiKeyId string, packageId string) (*entity.ApihubApiKeyUserEntity_deprecated, error)
	GetPackageApiKey(apiKeyId string, packageId string) (*entity.ApihubApiKeyUserEntity, error)
	GetApiKey(apiKeyId string) (*entity.ApihubApiKeyEntity, error)
}

// diff --git a/qubership-apihub-service/repository/ApihubApiKeyRepositoryPG.go b/qubership-apihub-service/repository/ApihubApiKeyRepositoryPG.go new file mode 100644 index 0000000..668be95 --- /dev/null +++ b/qubership-apihub-service/repository/ApihubApiKeyRepositoryPG.go @@ -0,0 +1,182 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package repository

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/go-pg/pg/v10"
)

// NewApihubApiKeyRepositoryPG creates a PostgreSQL-backed
// ApihubApiKeyRepository using the given connection provider.
func NewApihubApiKeyRepositoryPG(cp db.ConnectionProvider) (ApihubApiKeyRepository, error) {
	return &apihubApiKeyRepositoryImpl{cp: cp}, nil
}

type apihubApiKeyRepositoryImpl struct {
	cp db.ConnectionProvider
}

// SaveApiKey_deprecated inserts a key in the deprecated entity shape.
//
// Deprecated: use SaveApiKey instead.
func (r apihubApiKeyRepositoryImpl) SaveApiKey_deprecated(apihubApiKeyEntity *entity.ApihubApiKeyEntity_deprecated) error {
	_, err := r.cp.GetConnection().Model(apihubApiKeyEntity).Insert()
	return err
}

// SaveApiKey inserts a new API key record.
func (r apihubApiKeyRepositoryImpl) SaveApiKey(apihubApiKeyEntity *entity.ApihubApiKeyEntity) error {
	_, err := r.cp.GetConnection().Model(apihubApiKeyEntity).Insert()
	return err
}

// RevokeApiKey soft-deletes the key by stamping deleted_by/deleted_at; the
// row itself is kept for auditing.
func (r apihubApiKeyRepositoryImpl) RevokeApiKey(id string, userId string) error {
	timeNow := time.Now()
	_, err := r.cp.GetConnection().Model(&entity.ApihubApiKeyEntity{DeletedBy: userId, DeletedAt: &timeNow}).
		Where("id = ?", id).
		Set("deleted_by = ?deleted_by").
		Set("deleted_at = ?deleted_at").
		Update()
	return err
}

// GetPackageApiKeys_deprecated lists keys for a package in the deprecated
// entity shape. A missing result set is returned as an empty slice.
//
// Deprecated: use GetPackageApiKeys instead.
func (r apihubApiKeyRepositoryImpl) GetPackageApiKeys_deprecated(packageId string) ([]entity.ApihubApiKeyEntity_deprecated, error) {
	var result []entity.ApihubApiKeyEntity_deprecated
	err := r.cp.GetConnection().Model(&result).
		Where("package_id = ?", packageId).
		Select()
	if err != nil {
		if err != pg.ErrNoRows {
			return nil, err
		}
	}
	return result, nil
}

// GetPackageApiKeys_v3_deprecated lists keys for a package enriched with the
// creating user's name/email/avatar (left join: missing users yield empty
// strings via coalesce).
//
// Deprecated: use GetPackageApiKeys instead.
func (r apihubApiKeyRepositoryImpl) GetPackageApiKeys_v3_deprecated(packageId string) ([]entity.ApihubApiKeyUserEntity_deprecated, error) {
	var result []entity.ApihubApiKeyUserEntity_deprecated
	err := r.cp.GetConnection().Model(&result).
		ColumnExpr("apihub_api_keys.*").
		ColumnExpr("coalesce(u.name, '') as user_name").
		ColumnExpr("coalesce(u.email, '') as user_email").
		ColumnExpr("coalesce(u.avatar_url, '') as user_avatar_url").
		Join("left join user_data u").
		JoinOn("u.user_id = apihub_api_keys.created_by").
		Where("apihub_api_keys.package_id = ?", packageId).
		Select()
	if err != nil {
		if err != pg.ErrNoRows {
			return nil, err
		}
	}
	return result, nil
}

// GetPackageApiKeys lists keys for a package enriched with both the creating
// user (created_by) and the user the key was created for (created_for).
func (r apihubApiKeyRepositoryImpl) GetPackageApiKeys(packageId string) ([]entity.ApihubApiKeyUserEntity, error) {
	var result []entity.ApihubApiKeyUserEntity
	err := r.cp.GetConnection().Model(&result).
		ColumnExpr("apihub_api_keys.*").
		ColumnExpr("coalesce(u.name, '') as user_name").
		ColumnExpr("coalesce(u.email, '') as user_email").
		ColumnExpr("coalesce(u.avatar_url, '') as user_avatar_url").
		Join("left join user_data u").
		JoinOn("u.user_id = apihub_api_keys.created_by").
		ColumnExpr("coalesce(cfu.name, '') as created_for_user_name").
		ColumnExpr("coalesce(cfu.email, '') as created_for_user_email").
		ColumnExpr("coalesce(cfu.avatar_url, '') as created_for_user_avatar_url").
		Join("left join user_data cfu").
		JoinOn("cfu.user_id = apihub_api_keys.created_for").
		Where("apihub_api_keys.package_id = ?", packageId).
		Select()
	if err != nil {
		if err != pg.ErrNoRows {
			return nil, err
		}
	}
	return result, nil
}

// GetApiKeyByHash looks a key up by its stored hash; returns nil (no error)
// when no key matches.
func (r apihubApiKeyRepositoryImpl) GetApiKeyByHash(apiKeyHash string) (*entity.ApihubApiKeyEntity, error) {
	ent := new(entity.ApihubApiKeyEntity)
	err := r.cp.GetConnection().Model(ent).
		Where("api_key = ?", apiKeyHash).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return ent, nil
}

// GetPackageApiKey_deprecated returns one key of a package enriched with the
// creating user; nil (no error) when not found.
//
// Deprecated: use GetPackageApiKey instead.
func (r apihubApiKeyRepositoryImpl) GetPackageApiKey_deprecated(apiKeyId string, packageId string) (*entity.ApihubApiKeyUserEntity_deprecated, error) {
	ent := new(entity.ApihubApiKeyUserEntity_deprecated)
	err := r.cp.GetConnection().Model(ent).
		ColumnExpr("apihub_api_keys.*").
		ColumnExpr("coalesce(u.name, '') as user_name").
		ColumnExpr("coalesce(u.email, '') as user_email").
		ColumnExpr("coalesce(u.avatar_url, '') as user_avatar_url").
		Join("left join user_data u").
		JoinOn("u.user_id = apihub_api_keys.created_by").
		Where("apihub_api_keys.id = ?", apiKeyId).
		Where("apihub_api_keys.package_id = ?", packageId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return ent, nil
}

// GetPackageApiKey returns one key of a package enriched with both the
// creating user and the created-for user; nil (no error) when not found.
func (r apihubApiKeyRepositoryImpl) GetPackageApiKey(apiKeyId string, packageId string) (*entity.ApihubApiKeyUserEntity, error) {
	ent := new(entity.ApihubApiKeyUserEntity)
	err := r.cp.GetConnection().Model(ent).
		ColumnExpr("apihub_api_keys.*").
		ColumnExpr("coalesce(u.name, '') as user_name").
		ColumnExpr("coalesce(u.email, '') as user_email").
		ColumnExpr("coalesce(u.avatar_url, '') as user_avatar_url").
		Join("left join user_data u").
		JoinOn("u.user_id = apihub_api_keys.created_by").
		ColumnExpr("coalesce(cfu.name, '') as created_for_user_name").
		ColumnExpr("coalesce(cfu.email, '') as created_for_user_email").
		ColumnExpr("coalesce(cfu.avatar_url, '') as created_for_user_avatar_url").
		Join("left join user_data cfu").
		JoinOn("cfu.user_id = apihub_api_keys.created_for").
		Where("apihub_api_keys.id = ?", apiKeyId).
		Where("apihub_api_keys.package_id = ?", packageId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return ent, nil
}

// GetApiKey returns the key with the given id regardless of package; nil
// (no error) when not found.
func (r apihubApiKeyRepositoryImpl) GetApiKey(apiKeyId string) (*entity.ApihubApiKeyEntity, error) {
	ent := new(entity.ApihubApiKeyEntity)
	err := r.cp.GetConnection().Model(ent).
		Where("id = ?", apiKeyId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return ent, nil
}

// diff --git a/qubership-apihub-service/repository/BranchRepository.go b/qubership-apihub-service/repository/BranchRepository.go new file mode 100644 index 0000000..cbda780 --- /dev/null +++ b/qubership-apihub-service/repository/BranchRepository.go @@ -0,0 +1,26 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package repository

import (
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
)

// BranchRepository stores per-branch draft state for a project.
type BranchRepository interface {
	SetChangeType(projectId string, branchName string, changeType string) error
	SetDraftEditors(projectId string, branchName string, editors []string) error
	GetBranchDraft(projectId string, branchName string) (*entity.BranchDraftEntity, error)
	GetBranchDrafts() ([]entity.BranchDraftEntity, error)
}

// diff --git a/qubership-apihub-service/repository/BranchRepositoryPG.go b/qubership-apihub-service/repository/BranchRepositoryPG.go new file mode 100644 index 0000000..a2e0aca --- /dev/null +++ b/qubership-apihub-service/repository/BranchRepositoryPG.go @@ -0,0 +1,81 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/go-pg/pg/v10" +) + +func NewBranchRepositoryPG(cp db.ConnectionProvider) (BranchRepository, error) { + return &branchRepositoryImpl{cp: cp}, nil +} + +type branchRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (b branchRepositoryImpl) SetChangeType(projectId string, branchName string, changeType string) error { + _, err := b.cp.GetConnection().Model(&entity.BranchDraftEntity{ChangeType: changeType}). + Column("change_type"). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Update() + if err != nil { + return err + } + return nil +} + +func (b branchRepositoryImpl) SetDraftEditors(projectId string, branchName string, editors []string) error { + _, err := b.cp.GetConnection().Model(&entity.BranchDraftEntity{Editors: editors}). + Column("editors"). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Update() + if err != nil { + return err + } + return nil +} + +func (b branchRepositoryImpl) GetBranchDraft(projectId string, branchName string) (*entity.BranchDraftEntity, error) { + result := new(entity.BranchDraftEntity) + err := b.cp.GetConnection().Model(result). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (b branchRepositoryImpl) GetBranchDrafts() ([]entity.BranchDraftEntity, error) { + var result []entity.BranchDraftEntity + err := b.cp.GetConnection().Model(&result). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} diff --git a/qubership-apihub-service/repository/BuildCleanupRepository.go b/qubership-apihub-service/repository/BuildCleanupRepository.go new file mode 100644 index 0000000..deb6d7f --- /dev/null +++ b/qubership-apihub-service/repository/BuildCleanupRepository.go @@ -0,0 +1,441 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package repository

import (
	"context"
	"fmt"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
	"github.com/go-pg/pg/v10"
	"github.com/pkg/errors"
)

// BuildCleanupRepository removes stale build data (sources, results,
// migration artifacts, unreferenced operation data) and records per-run
// cleanup statistics in build_cleanup entities keyed by runId.
type BuildCleanupRepository interface {
	GetLastCleanup() (*entity.BuildCleanupEntity, error)
	RemoveOldBuildEntities(runId int, scheduledAt time.Time) error
	RemoveMigrationBuildData() (deletedRows int, err error)
	GetRemoveCandidateOldBuildEntitiesIds() ([]string, error)
	RemoveOldBuildSourcesByIds(ctx context.Context, ids []string, runId int, scheduledAt time.Time) error
	GetRemoveMigrationBuildIds() ([]string, error)
	RemoveMigrationBuildSourceData(ids []string) (deletedRows int, err error)
	RemoveUnreferencedOperationData(runId int) error
	StoreCleanup(ent *entity.BuildCleanupEntity) error
	GetCleanup(runId int) (*entity.BuildCleanupEntity, error)
}

// NewBuildCleanupRepository creates a BuildCleanupRepository backed by the
// given database connection provider.
func NewBuildCleanupRepository(cp db.ConnectionProvider) BuildCleanupRepository {
	return &buildCleanUpRepositoryImpl{
		cp: cp,
	}
}

type buildCleanUpRepositoryImpl struct {
	cp db.ConnectionProvider
}

// GetLastCleanup returns the most recent cleanup record, or nil when no
// cleanup has ever run.
func (b buildCleanUpRepositoryImpl) GetLastCleanup() (*entity.BuildCleanupEntity, error) {
	result := new(entity.BuildCleanupEntity)
	err := b.cp.GetConnection().Model(result).
		OrderExpr("run_id DESC").Limit(1).
		Select()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// RemoveOldBuildEntities deletes build sources and results older than the
// retention windows (1 week for completed builds, 2 weeks for failed ones)
// inside a single transaction, accumulates the deleted-row counts into the
// cleanup record for runId, then vacuums the affected tables.
// NOTE(review): scheduledAt is currently unused — confirm whether it was
// meant to bound the retention window.
func (b buildCleanUpRepositoryImpl) RemoveOldBuildEntities(runId int, scheduledAt time.Time) error {
	ctx := context.Background()
	var deletedBuildSources, deletedBuildResults int
	err := b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		cleanupEnt, err := b.getCleanupTx(tx, runId)
		if err != nil {
			return err
		}
		if cleanupEnt == nil {
			return errors.Errorf("Failed to get cleanup run entity by id %d", runId)
		}

		successBuildsRetention := time.Now().Add(-(time.Hour * 168)) // 1 week
		failedBuildsRetention := time.Now().Add(-(time.Hour * 336))  // 2 weeks

		deletedBuildSources, err = b.removeOldBuildSources(tx, successBuildsRetention, failedBuildsRetention)
		if err != nil {
			return errors.Wrap(err, "Failed to remove old build sources")
		}
		deletedBuildResults, err = b.removeOldBuildResults(tx, successBuildsRetention, failedBuildsRetention)
		if err != nil {
			return errors.Wrap(err, "Failed to remove old build results")
		}
		// Record per-table and total deletion counts on the cleanup run.
		cleanupEnt.BuildResult = deletedBuildResults
		cleanupEnt.BuildSrc = deletedBuildSources
		cleanupEnt.DeletedRows = cleanupEnt.DeletedRows + deletedBuildSources + deletedBuildResults
		if err = b.updateCleanupTx(tx, *cleanupEnt); err != nil {
			return err
		}

		return nil
	})
	if err != nil {
		return err
	}

	// Do not run vacuum in transaction
	_, err = b.cp.GetConnection().Exec("vacuum full build_src")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table build_src")
	}
	_, err = b.cp.GetConnection().Exec("vacuum full build_result")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table build_result")
	}
	return err
}

// RemoveOldBuildSourcesByIds deletes build_src rows for the given build ids
// in one transaction, records the count on the cleanup run, then vacuums
// build_src.
func (b buildCleanUpRepositoryImpl) RemoveOldBuildSourcesByIds(ctx context.Context, ids []string, runId int, scheduledAt time.Time) error {
	err := b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		cleanupEnt, err := b.getCleanupTx(tx, runId)
		if err != nil {
			return err
		}
		if cleanupEnt == nil {
			return errors.Errorf("Failed to get cleanup run entity by id %d", runId)
		}

		query := `delete from build_src
		where build_id in (?)`
		result, err := tx.Exec(query, pg.In(ids))
		if err != nil {
			return fmt.Errorf("failed to delete builds from table build_src: %w", err)
		}
		deletedRows := result.RowsAffected()

		cleanupEnt.BuildSrc = deletedRows
		cleanupEnt.DeletedRows = cleanupEnt.DeletedRows + deletedRows
		if err = b.updateCleanupTx(tx, *cleanupEnt); err != nil {
			return err
		}
		return nil
	})
	if err != nil {
		return err
	}
	_, err = b.cp.GetConnection().Exec("vacuum full build_src")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table build_src")
	}
	return err
}

// GetRemoveCandidateOldBuildEntitiesIds returns the ids of builds past their
// retention window (1 week completed, 2 weeks failed) without deleting them.
func (b buildCleanUpRepositoryImpl) GetRemoveCandidateOldBuildEntitiesIds() ([]string, error) {
	successBuildsRetention := time.Now().Add(-(time.Hour * 168)) // 1 week
	failedBuildsRetention := time.Now().Add(-(time.Hour * 336))  // 2 weeks

	return b.getRemoveCandidateOldBuildEntities(successBuildsRetention, failedBuildsRetention)
}

// StoreCleanup inserts a new cleanup run record.
func (b buildCleanUpRepositoryImpl) StoreCleanup(ent *entity.BuildCleanupEntity) error {
	_, err := b.cp.GetConnection().Model(ent).Insert()
	return err
}

// updateCleanupTx updates a cleanup run record inside an open transaction.
func (b buildCleanUpRepositoryImpl) updateCleanupTx(tx *pg.Tx, ent entity.BuildCleanupEntity) error {
	_, err := tx.Model(&ent).Where("run_id = ?", ent.RunId).Update()
	return err
}

// updateCleanup updates a cleanup run record outside any transaction.
func (b buildCleanUpRepositoryImpl) updateCleanup(ent entity.BuildCleanupEntity) error {
	_, err := b.cp.GetConnection().Model(&ent).Where("run_id = ?", ent.RunId).Update()
	return err
}

// GetCleanup returns the cleanup record for runId.
// NOTE(review): unlike getCleanupTx this surfaces pg.ErrNoRows to the caller
// instead of returning nil — confirm whether callers rely on that.
func (b buildCleanUpRepositoryImpl) GetCleanup(runId int) (*entity.BuildCleanupEntity, error) {
	ent := new(entity.BuildCleanupEntity)
	err := b.cp.GetConnection().Model(ent).Where("run_id = ?", runId).Select()
	return ent, err
}

// getCleanupTx returns the cleanup record for runId inside an open
// transaction; a missing record yields (nil, nil).
func (b buildCleanUpRepositoryImpl) getCleanupTx(tx *pg.Tx, runId int) (*entity.BuildCleanupEntity, error) {
	ent := new(entity.BuildCleanupEntity)
	err := tx.Model(ent).Where("run_id = ?", runId).Select()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
	}
	return ent, err
}

// removeOldBuildResults deletes build_result rows belonging to builds past
// their retention window and returns the number of deleted rows.
func (b buildCleanUpRepositoryImpl) removeOldBuildResults(tx *pg.Tx, successBuildsRetention, failedBuildsRetention time.Time) (deletedRows int, err error) {
	query := `with builds as
		(select build_id from build where
			(status = ? and last_active <= ?) or
			(status = ? and last_active <= ?))
		delete from build_result
		where build_result.build_id in (select builds.build_id from builds)`
	result, err := tx.Exec(query, view.StatusError, failedBuildsRetention, view.StatusComplete, successBuildsRetention)
	if err != nil {
		return 0, fmt.Errorf("failed to delete builds from table build_result: %w", err)
	}
	deletedRows = result.RowsAffected()

	return deletedRows, err
}

// removeOldBuildSources deletes build_src rows belonging to builds past
// their retention window and returns the number of deleted rows.
func (b buildCleanUpRepositoryImpl) removeOldBuildSources(tx *pg.Tx, successBuildsRetention, failedBuildsRetention time.Time) (deletedRows int, err error) {
	query := `with builds as
		(select build_id from build where
			(status = ? and last_active <= ?) or
			(status = ? and last_active <= ?))
		delete from build_src
		where build_src.build_id in (select builds.build_id from builds)`
	result, err := tx.Exec(query, view.StatusError, failedBuildsRetention, view.StatusComplete, successBuildsRetention)
	if err != nil {
		return 0, fmt.Errorf("failed to delete builds from table build_src: %w", err)
	}
	deletedRows = result.RowsAffected()

	return deletedRows, err
}

// getRemoveCandidateOldBuildEntities selects the ids of builds whose status
// and last_active place them past the given retention cutoffs.
func (b buildCleanUpRepositoryImpl) getRemoveCandidateOldBuildEntities(successBuildsRetention, failedBuildsRetention time.Time) ([]string, error) {
	var result []string
	var ents []entity.BuildIdEntity

	query := `select build_id from build where
			(status = ? and last_active <= ?) or
			(status = ? and last_active <= ?)`
	_, err := b.cp.GetConnection().Query(&ents, query, view.StatusError, failedBuildsRetention, view.StatusComplete, successBuildsRetention)
	if err != nil {
		return nil, err
	}
	for _, ent := range ents {
		result = append(result, ent.Id)
	}
	return result, nil
}

// RemoveMigrationBuildData deletes build results and sources for builds
// created by the "db migration" user, then vacuums both tables. Returns the
// total number of deleted rows.
func (b buildCleanUpRepositoryImpl) RemoveMigrationBuildData() (deletedRows int, err error) {
	ctx := context.Background()
	err = b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		query := `with builds as (select build_id from build where created_by = ?)
			delete from build_result
			where build_result.build_id in (select builds.build_id from builds)`
		result, err := tx.Exec(query, "db migration")
		if err != nil {
			return err
		}
		deletedRows += result.RowsAffected()

		query = `with builds as (select build_id from build where created_by = ?)
			delete from build_src
			where build_src.build_id in (select builds.build_id from builds)`
		result, err = tx.Exec(query, "db migration")
		if err != nil {
			return err
		}
		deletedRows += result.RowsAffected()

		return nil
	})
	if err != nil {
		return deletedRows, err
	}

	// Do not run vacuum in transaction
	_, err = b.cp.GetConnection().Exec("vacuum full build_src")
	if err != nil {
		return deletedRows, errors.Wrap(err, "failed to run vacuum for table build_src")
	}
	_, err = b.cp.GetConnection().Exec("vacuum full build_result")
	if err != nil {
		return deletedRows, errors.Wrap(err, "failed to run vacuum for table build_result")
	}

	return deletedRows, nil
}

// GetRemoveMigrationBuildIds returns the ids of builds created by the
// "db migration" user.
func (b buildCleanUpRepositoryImpl) GetRemoveMigrationBuildIds() ([]string, error) {
	var result []string
	var ents []entity.BuildIdEntity

	query := `select build_id from build where created_by = ?`
	_, err := b.cp.GetConnection().Query(&ents, query, "db migration")
	if err != nil {
		return nil, err
	}
	for _, ent := range ents {
		result = append(result, ent.Id)
	}
	return result, nil
}

// RemoveMigrationBuildSourceData deletes build_src rows for the given build
// ids, then vacuums build_src. Returns the number of deleted rows.
func (b buildCleanUpRepositoryImpl) RemoveMigrationBuildSourceData(ids []string) (deletedRows int, err error) {
	ctx := context.Background()
	err = b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		query := `delete from build_src
		where build_id in (?)`
		result, err := tx.Exec(query, pg.In(ids))
		if err != nil {
			return err
		}
		deletedRows += result.RowsAffected()

		return nil
	})
	if err != nil {
		return deletedRows, err
	}

	// Do not run vacuum in transaction
	_, err = b.cp.GetConnection().Exec("vacuum full build_src")
	if err != nil {
		return deletedRows, errors.Wrap(err, "failed to run vacuum for table build_src")
	}

	return deletedRows, nil
}

// RemoveUnreferencedOperationData collects data hashes that are no longer
// referenced by any live operation into a temporary table (tmp_data_hash),
// deletes the matching rows from operation_data and the three text-search
// tables in pages of 20, updating the cleanup record after each page, then
// drops the temp table and vacuums the affected tables.
func (b buildCleanUpRepositoryImpl) RemoveUnreferencedOperationData(runId int) error {
	ctx := context.Background()
	cleanupEnt, err := b.GetCleanup(runId)
	if err != nil {
		return err
	}
	if cleanupEnt == nil {
		return errors.Errorf("Failed to get cleanup run entity by id %d", runId)
	}

	var insertedRows int
	err = b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		_, err = tx.Exec("create table if not exists tmp_data_hash (data_hash varchar)")
		if err != nil {
			return err
		}
		_, err = tx.Exec("truncate table tmp_data_hash")
		if err != nil {
			return err
		}
		//insert data hashes for operations that no longer exist
		insertResult, err := tx.Exec(`
		insert into tmp_data_hash
		select od.data_hash from operation_data od
			left join operation o
			on od.data_hash=o.data_hash
			where o.package_id is null`)
		if err != nil {
			return err
		}
		insertedRows = insertResult.RowsAffected()
		//insert data hashes for operations in deleted versions
		insertResult, err = tx.Exec(`
		insert into tmp_data_hash
		select distinct od.data_hash from operation_data od
			inner join operation o
			on od.data_hash=o.data_hash
			inner join published_version pv_del
			on pv_del.package_id=o.package_id
			and pv_del.version=o.version
			and pv_del.revision=o.revision
			and pv_del.deleted_at is not null
		except
		select distinct o.data_hash from operation o
			inner join published_version pv
			on pv.package_id=o.package_id
			and pv.version=o.version
			and pv.revision=o.revision
			and pv.deleted_at is null;`)
		if err != nil {
			return err
		}
		insertedRows += insertResult.RowsAffected()
		return nil
	})
	if err != nil {
		return errors.Wrap(err, "failed to create temporary table for cleanup job")
	}

	// Delete in fixed-size pages over the (stable) tmp_data_hash contents so
	// progress is persisted to the cleanup record after every page.
	limit := 20
	conn := b.cp.GetConnection()
	for page := 0; page <= insertedRows/limit+1; page++ {
		res, err := conn.Exec("delete from operation_data od where od.data_hash = any (select data_hash from tmp_data_hash order by data_hash limit ? offset ?)", limit, page*limit)
		if err != nil {
			return err
		}
		cleanupEnt.OperationData += res.RowsAffected()
		cleanupEnt.DeletedRows += res.RowsAffected()

		res, err = conn.Exec("delete from ts_operation_data od where od.data_hash = any (select data_hash from tmp_data_hash order by data_hash limit ? offset ?)", limit, page*limit)
		if err != nil {
			return err
		}
		cleanupEnt.TsOperationData += res.RowsAffected()
		cleanupEnt.DeletedRows += res.RowsAffected()

		res, err = conn.Exec("delete from ts_rest_operation_data od where od.data_hash = any (select data_hash from tmp_data_hash order by data_hash limit ? offset ?)", limit, page*limit)
		if err != nil {
			return err
		}
		cleanupEnt.TsRestOperationData += res.RowsAffected()
		cleanupEnt.DeletedRows += res.RowsAffected()

		res, err = conn.Exec("delete from ts_graphql_operation_data od where od.data_hash = any (select data_hash from tmp_data_hash order by data_hash limit ? offset ?)", limit, page*limit)
		if err != nil {
			return err
		}
		cleanupEnt.TsGQLOperationData += res.RowsAffected()
		cleanupEnt.DeletedRows += res.RowsAffected()

		err = b.updateCleanup(*cleanupEnt)
		if err != nil {
			return err
		}
	}

	_, err = conn.Exec("drop table if exists tmp_data_hash")
	if err != nil {
		return errors.Wrap(err, "failed to drop temporary table 'tmp_data_hash' for cleanup job")
	}

	// Do not run vacuum in transaction
	_, err = b.cp.GetConnection().Exec("vacuum full operation_data")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table operation_data")
	}

	_, err = b.cp.GetConnection().Exec("vacuum full ts_operation_data")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table ts_operation_data")
	}

	_, err = b.cp.GetConnection().Exec("vacuum full ts_rest_operation_data")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table ts_rest_operation_data")
	}

	_, err = b.cp.GetConnection().Exec("vacuum full ts_graphql_operation_data")
	if err != nil {
		return errors.Wrap(err, "failed to run vacuum for table ts_graphql_operation_data")
	}

	return nil
}

// diff --git a/qubership-apihub-service/repository/BuildRepository.go b/qubership-apihub-service/repository/BuildRepository.go new file mode 100644 index 0000000..7c5149e --- /dev/null +++ b/qubership-apihub-service/repository/BuildRepository.go @@ -0,0 +1,309 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// BuildRepository provides persistence for builds: creation, status tracking,
// source config access, and lookup of previously produced builds by search
// criteria. All methods operate on the "build", "build_src" and
// "build_depends" tables.
type BuildRepository interface {
	// StoreBuild atomically inserts a build, its source entity and its
	// dependency links in one transaction.
	StoreBuild(buildEntity entity.BuildEntity, sourceEntity entity.BuildSourceEntity, depends []entity.BuildDependencyEntity) error
	// UpdateBuildStatus transitions the build to the given status with details.
	UpdateBuildStatus(buildId string, status view.BuildStatusEnum, details string) error
	// GetBuild returns the build by id, or nil if not found.
	GetBuild(buildId string) (*entity.BuildEntity, error)
	// GetBuilds returns builds for the given ids (nil for an empty input).
	GetBuilds(buildIds []string) ([]entity.BuildEntity, error)
	// GetBuildSrc returns the build source row by build id, or nil if not found.
	GetBuildSrc(buildId string) (*entity.BuildSourceEntity, error)

	// FindAndTakeFreeBuild atomically claims the next free build for the given
	// builder, or returns nil when no build is available.
	FindAndTakeFreeBuild(builderId string) (*entity.BuildEntity, error)

	// GetBuildByChangelogSearchQuery finds the latest build matching a
	// changelog (comparison) config, or nil.
	GetBuildByChangelogSearchQuery(searchQuery entity.ChangelogBuildSearchQueryEntity) (*entity.BuildEntity, error)
	// GetBuildByDocumentGroupSearchQuery finds the latest build matching a
	// document-group export config, or nil.
	GetBuildByDocumentGroupSearchQuery(searchQuery entity.DocumentGroupBuildSearchQueryEntity) (*entity.BuildEntity, error)

	// UpdateBuildSourceConfig replaces the stored build source config JSON.
	UpdateBuildSourceConfig(buildId string, config map[string]interface{}) error
}

// NewBuildRepositoryPG creates a PostgreSQL-backed BuildRepository.
// The error return is always nil today but kept for interface stability.
func NewBuildRepositoryPG(cp db.ConnectionProvider) (BuildRepository, error) {
	return &buildRepositoryImpl{cp: cp}, nil
}

// buildRepositoryImpl is the go-pg implementation of BuildRepository.
type buildRepositoryImpl struct {
	cp db.ConnectionProvider // provides the shared DB connection
}

// GetBuild returns the build row by id, or (nil, nil) when no such build exists.
func (b buildRepositoryImpl) GetBuild(buildId string) (*entity.BuildEntity, error) {
	result := new(entity.BuildEntity)
	err := b.cp.GetConnection().Model(result).
		Where("build_id = ?", buildId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetBuilds returns build rows for the given ids; (nil, nil) for an empty
// input or when nothing matches.
func (b buildRepositoryImpl) GetBuilds(buildIds []string) ([]entity.BuildEntity, error) {
	var result []entity.BuildEntity
	// Guard: pg.In over an empty slice would generate invalid SQL.
	if len(buildIds) == 0 {
		return nil, nil
	}
	err := b.cp.GetConnection().Model(&result).
		Where("build_id in (?)", pg.In(buildIds)).
		Select()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetBuildSrc returns the build source row by build id, or (nil, nil) when absent.
func (b buildRepositoryImpl) GetBuildSrc(buildId string) (*entity.BuildSourceEntity, error) {
	result := new(entity.BuildSourceEntity)
	err := b.cp.GetConnection().Model(result).
		Where("build_id = ?", buildId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// StoreBuild inserts the build, its source and all dependency links in a
// single transaction so a build is never visible without its source/deps.
func (b buildRepositoryImpl) StoreBuild(buildEntity entity.BuildEntity, sourceEntity entity.BuildSourceEntity, depends []entity.BuildDependencyEntity) error {
	ctx := context.Background()
	return b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		_, err := tx.Model(&buildEntity).Insert()
		if err != nil {
			return fmt.Errorf("failed to insert build entity %+v with error %w", buildEntity, err)
		}

		_, err = tx.Model(&sourceEntity).Insert()
		if err != nil {
			return fmt.Errorf("failed to insert build source entity %+v with error %w", sourceEntity, err)
		}

		for _, dEnt := range depends {
			_, err = tx.Model(&dEnt).Insert()
			if err != nil {
				return fmt.Errorf("failed to insert build depends entity %+v with error %w", dEnt, err)
			}
		}
		return nil
	})
}
limit 1 for no key update" + +func (b buildRepositoryImpl) UpdateBuildStatus(buildId string, status view.BuildStatusEnum, details string) error { + ctx := context.Background() + err := b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + var ents []entity.BuildEntity + _, err := tx.Query(&ents, getBuildWithLock, buildId) + if err != nil { + return fmt.Errorf("failed to get build %s for status update: %w", buildId, err) + } + if len(ents) == 0 { + return fmt.Errorf("build with id = %s is not found for status update", buildId) + } + ent := &ents[0] + + buildStatus, err := view.BuildStatusFromString(ent.Status) + if err != nil { + return fmt.Errorf("invalid status for buildId %s: %s", ent.BuildId, err) + } + if buildStatus == view.StatusComplete || + (buildStatus == view.StatusError && status != view.StatusError) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BuildAlreadyFinished, + Message: exception.BuildAlreadyFinishedMsg, + Params: map[string]interface{}{"buildId": buildId}, + } + } + //Append new error to existing one + if buildStatus == view.StatusError && status == view.StatusError && + ent.RestartCount >= 2 && ent.Details != "" { + details = fmt.Sprintf("%v: %v", ent.Details, details) + } + + query := tx.Model(ent). + Where("build_id = ?", buildId). + Set("status = ?", status). + Set("details = ?", details). 
+ Set("last_active = now()") + _, err = query.Update() + if err != nil { + return err + } + return nil + }) + return err +} + +const buildKeepaliveTimeoutSec = 600 + +var queryItemToBuild = fmt.Sprintf("select * from build b where "+ + "(b.status='none' or (b.status='%s' and b.last_active < (now() - interval '%d seconds'))) and "+ + "(b.build_id not in (select distinct build_id from build_depends where depend_id in (select build.build_id from build where status='%s' or status='%s'))) "+ + "order by b.priority DESC, b.created_at ASC limit 1 for no key update skip locked", view.StatusRunning, buildKeepaliveTimeoutSec, view.StatusNotStarted, view.StatusRunning) + +func (b buildRepositoryImpl) FindAndTakeFreeBuild(builderId string) (*entity.BuildEntity, error) { + var result *entity.BuildEntity + var err error + for { + buildFailed := false + err = b.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error { + var ents []entity.BuildEntity + + _, err := tx.Query(&ents, queryItemToBuild) + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return fmt.Errorf("failed to find free build: %w", err) + } + if len(ents) > 0 { + result = &ents[0] + + // we got build candidate + if result.RestartCount >= 2 { + query := tx.Model(result). + Where("build_id = ?", result.BuildId). + Set("status = ?", view.StatusError). + Set("details = ?", fmt.Sprintf("Restart count exceeded limit. Details: %v", result.Details)). + Set("last_active = now()") + _, err := query.Update() + if err != nil { + return err + } + buildFailed = true + return nil + } + + // take free build + isFirstRun := result.Status == string(view.StatusNotStarted) + + if !isFirstRun { + result.RestartCount += 1 + } + + result.Status = string(view.StatusRunning) + result.BuilderId = builderId + // TODO: add optimistic lock as well? + + _, err = tx.Model(result). + Set("status = ?status"). + Set("builder_id = ?builder_id"). + Set("restart_count = ?restart_count"). 
+ Set("last_active = now()"). + Where("build_id = ?", result.BuildId). + Update() + if err != nil { + return fmt.Errorf("unable to update build status during takeBuild: %w", err) + } + + return nil + } + return nil + }) + if buildFailed { + continue + } + break + } + if err != nil { + return nil, err + } + return result, nil +} + +func (b buildRepositoryImpl) GetBuildByChangelogSearchQuery(searchQuery entity.ChangelogBuildSearchQueryEntity) (*entity.BuildEntity, error) { + var ent entity.BuildEntity + query := ` + with bs as ( + select * from build_src + where config->>'version' = ?version + and config->>'packageId' = ?package_id + and config->>'previousVersionPackageId' = ?previous_version_package_id + and config->>'previousVersion' = ?previous_version + and config->>'buildType' = ?build_type + and (config->>'comparisonRevision')::int = ?comparison_revision + and (config->>'comparisonPrevRevision')::int = ?comparison_prev_revision + ) + select b.* from build as b, bs + where b.build_id = bs.build_id + order by created_at desc + limit 1` + _, err := b.cp.GetConnection().Model(&searchQuery).QueryOne(&ent, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return &ent, nil +} + +func (b buildRepositoryImpl) GetBuildByDocumentGroupSearchQuery(searchQuery entity.DocumentGroupBuildSearchQueryEntity) (*entity.BuildEntity, error) { + var ent entity.BuildEntity + query := ` + with bs as ( + select * from build_src + where config->>'version' = ?version + and config->>'packageId' = ?package_id + and config->>'buildType' = ?build_type + and config->>'format' = ?format + and config->>'apiType' = ?api_type + and config->>'groupName' = ?group_name + ) + select b.* from build as b, bs + where b.build_id = bs.build_id + order by created_at desc + limit 1` + _, err := b.cp.GetConnection().Model(&searchQuery).QueryOne(&ent, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return &ent, nil 
+} + +func (b buildRepositoryImpl) UpdateBuildSourceConfig(buildId string, config map[string]interface{}) error { + var ent entity.BuildSourceEntity + _, err := b.cp.GetConnection().Model(&ent). + Where("build_id = ?", buildId). + Set("config = ?", config). + Update() + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/repository/BuildResultRepository.go b/qubership-apihub-service/repository/BuildResultRepository.go new file mode 100644 index 0000000..cd12208 --- /dev/null +++ b/qubership-apihub-service/repository/BuildResultRepository.go @@ -0,0 +1,98 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "context" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/go-pg/pg/v10" + "github.com/pkg/errors" +) + +type BuildResultRepository interface { + StoreBuildResult(ent entity.BuildResultEntity) error + GetBuildResult(buildId string) (*entity.BuildResultEntity, error) + GetBuildResultWithOffset(offset int) (*entity.BuildResultEntity, error) + DeleteBuildResults(buildIds []string) error +} + +func NewBuildResultRepository(cp db.ConnectionProvider) BuildResultRepository { + return &buildResultRepositoryImpl{cp: cp} +} + +type buildResultRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (b buildResultRepositoryImpl) StoreBuildResult(ent entity.BuildResultEntity) error { + _, err := b.cp.GetConnection().Model(&ent).Insert() + if err != nil { + return err + } + return nil + +} + +func (b buildResultRepositoryImpl) GetBuildResult(buildId string) (*entity.BuildResultEntity, error) { + result := new(entity.BuildResultEntity) + err := b.cp.GetConnection().Model(result). + Where("build_id = ?", buildId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (b buildResultRepositoryImpl) GetBuildResultWithOffset(offset int) (*entity.BuildResultEntity, error) { + result := new(entity.BuildResultEntity) + err := b.cp.GetConnection().Model(result).Offset(offset).Limit(1). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (b buildResultRepositoryImpl) DeleteBuildResults(buildIds []string) error { + ctx := context.Background() + var deletedRows int + err := b.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + query := `delete from build_result + where build_id in (?)` + result, err := tx.Exec(query, pg.In(buildIds)) + if err != nil { + return err + } + deletedRows += result.RowsAffected() + return nil + }) + + if deletedRows > 0 { + _, err = b.cp.GetConnection().Exec("vacuum full build_result") + if err != nil { + return errors.Wrap(err, "failed to run vacuum for table build_result") + } + } + return nil +} diff --git a/qubership-apihub-service/repository/BusinessMetricRepository.go b/qubership-apihub-service/repository/BusinessMetricRepository.go new file mode 100644 index 0000000..d832989 --- /dev/null +++ b/qubership-apihub-service/repository/BusinessMetricRepository.go @@ -0,0 +1,66 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
// BusinessMetricRepository reads aggregated business metrics
// (the "business_metric" table joined with "user_data").
type BusinessMetricRepository interface {
	// GetBusinessMetrics returns per-day, per-package, per-user metric sums.
	// parentPackageId filters to packages under that parent ("" = all);
	// hierarchyLevel > 0 truncates package ids to that many dot-separated
	// segments for grouping.
	GetBusinessMetrics(parentPackageId string, hierarchyLevel int) ([]entity.BusinessMetricEntity, error)
}

// NewBusinessMetricRepository creates a PostgreSQL-backed BusinessMetricRepository.
func NewBusinessMetricRepository(cp db.ConnectionProvider) BusinessMetricRepository {
	return businessMetricRepositoryImpl{
		cp: cp,
	}
}

// businessMetricRepositoryImpl is the go-pg implementation of BusinessMetricRepository.
type businessMetricRepositoryImpl struct {
	cp db.ConnectionProvider // provides the shared DB connection
}

// GetBusinessMetrics aggregates metric values from the JSONB "data" column,
// grouped by date, (possibly truncated) package id, user and metric name.
func (b businessMetricRepositoryImpl) GetBusinessMetrics(parentPackageId string, hierarchyLevel int) ([]entity.BusinessMetricEntity, error) {
	result := make([]entity.BusinessMetricEntity, 0)
	// By default group by the full package id (the JSONB key).
	packageGroupCol := `d.key::varchar`
	if hierarchyLevel > 0 {
		// Build an expression that keeps only the first hierarchyLevel
		// dot-separated segments of the package id, e.g. "a.b.c" -> "a.b"
		// for hierarchyLevel=2. coalesce handles ids with fewer segments.
		packageGroupCol = `(string_to_array(d.key::varchar, '.'))[1]`
		for level := 2; level <= hierarchyLevel; level++ {
			packageGroupCol = fmt.Sprintf(`%s || coalesce(('.' || (string_to_array(d.key::varchar, '.'))[%d]), '')`, packageGroupCol, level)
		}
	}
	// Note: only packageGroupCol (built above from constants) is interpolated;
	// parentPackageId is passed as bind parameters, so no SQL injection risk.
	// '%%' is an escaped literal '%' for the ILIKE prefix match.
	businessMetricsQuery := fmt.Sprintf(`
	select
	to_date(year || '-' || month || '-' || day, 'YYYY-MM-DD')::varchar as date,
	%s as package_id,
	coalesce(u.name, b.user_id) as username,
	metric,
	sum(d.value::int) as value
	from business_metric b left join user_data u on b.user_id = u.user_id,
	jsonb_each_text(data) d
	where (? = '' or d.key::varchar ilike ? || '.%%')
	group by 1, 2, 3, 4
	order by 1, 2
	`, packageGroupCol)

	_, err := b.cp.GetConnection().Query(&result, businessMetricsQuery, parentPackageId, parentPackageId)
	if err != nil {
		return nil, err
	}
	return result, nil
}
// DraftRepository provides persistence for branch drafts: the draft itself,
// its file contents and its version references. Keys are always
// (projectId, branchName) plus a file or reference identifier.
type DraftRepository interface {
	// CreateBranchDraft creates the draft with its initial contents and refs
	// in one transaction.
	CreateBranchDraft(ent entity.BranchDraftEntity, contents []*entity.ContentDraftEntity, refs []entity.BranchRefDraftEntity) error
	// DeleteBranchDraft removes the draft and all of its contents and refs.
	DeleteBranchDraft(projectId string, branchName string) error
	// CreateContent inserts a single draft file.
	CreateContent(content *entity.ContentDraftEntity) error
	// SetContents upserts the given draft files (insert-or-update).
	SetContents(contents []*entity.ContentDraftEntity) error
	// GetContent returns the draft file without its data blob, or nil.
	GetContent(projectId string, branchName string, fileId string) (*entity.ContentDraftEntity, error)
	// GetContentWithData returns the draft file including its data blob, or nil.
	GetContentWithData(projectId string, branchName string, fileId string) (*entity.ContentDraftEntity, error)
	// UpdateContent updates all columns of a draft file.
	UpdateContent(content *entity.ContentDraftEntity) error
	// UpdateContentMetadata updates only publish/labels/from_folder columns.
	UpdateContentMetadata(content *entity.ContentDraftEntity) error
	// UpdateContents updates all columns for each given draft file.
	UpdateContents(contents []*entity.ContentDraftEntity) error
	// UpdateContentsMetadata updates only metadata columns for each file.
	UpdateContentsMetadata(contents []*entity.ContentDraftEntity) error
	// UpdateContentsConflicts records merge-conflict blob/file ids per file.
	UpdateContentsConflicts(projectId string, branchName string, fileConflicts []view.FileConflict) error
	// UpdateContentData replaces a file's data, media type, status and blob id.
	UpdateContentData(projectId string, branchName string, fileId string, data []byte, mediaType string, status string, blobId string) error
	// UpdateContentStatus sets the current and last status of a file.
	UpdateContentStatus(projectId string, branchName string, fileId string, status string, lastStatus string) error
	// DeleteContent removes a draft file.
	DeleteContent(projectId string, branchName string, fileId string) error
	// ReplaceContent deletes the old file and upserts the new one atomically.
	ReplaceContent(projectId string, branchName string, oldFileId string, newContent *entity.ContentDraftEntity) error
	// ContentExists reports whether the draft file exists.
	ContentExists(projectId string, branchName string, fileId string) (bool, error)
	// GetContents lists all draft files (without data), ordered by index.
	GetContents(projectId string, branchName string) ([]entity.ContentDraftEntity, error)

	// CreateRef inserts a version reference for the draft.
	CreateRef(ref *entity.BranchRefDraftEntity) error
	// GetRef returns the reference, or nil when absent.
	GetRef(projectId string, branchName string, refProjectId string, refVersion string) (*entity.BranchRefDraftEntity, error)
	// DeleteRef removes the reference.
	DeleteRef(projectId string, branchName string, refProjectId string, refVersion string) error
	// UpdateRef updates the reference row.
	UpdateRef(ref *entity.BranchRefDraftEntity) error
	// ReplaceRef deletes the old reference and inserts the new one atomically.
	ReplaceRef(projectId string, branchName string, refProjectId string, refVersion string, ref *entity.BranchRefDraftEntity) error
	// GetRefs lists all references of the draft.
	GetRefs(projectId string, branchName string) ([]entity.BranchRefDraftEntity, error)

	// DraftExists reports whether a draft exists for the branch.
	DraftExists(projectId string, branchName string) (bool, error)

	// UpdateFolderContents applies folder bookkeeping: deletes the listed
	// files and toggles the from_folder flag for the two move lists.
	UpdateFolderContents(projectId string, branchName string, fileIdsToDelete []string, fileIdsToMoveInFolder []string, fileIdsToMoveFromFolder []string) error
}
// NewDraftRepositoryPG creates a PostgreSQL-backed DraftRepository.
// The error return is always nil today but kept for interface stability.
func NewDraftRepositoryPG(cp db.ConnectionProvider) (DraftRepository, error) {
	return &draftRepositoryImpl{cp: cp}, nil
}

// draftRepositoryImpl is the go-pg implementation of DraftRepository.
type draftRepositoryImpl struct {
	cp db.ConnectionProvider // provides the shared DB connection
}

// DeleteBranchDraft removes the draft row, all its file contents and all its
// refs for the branch in a single transaction.
func (b draftRepositoryImpl) DeleteBranchDraft(projectId string, branchName string) error {
	err := b.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error {
		_, err := tx.Model(&entity.BranchDraftEntity{}).
			Where("project_id = ?", projectId).
			Where("branch_name = ?", branchName).
			Delete()
		if err != nil {
			return err
		}
		_, err = tx.Model(&entity.ContentDraftEntity{}).
			Where("project_id = ?", projectId).
			Where("branch_name = ?", branchName).
			Delete()
		if err != nil {
			return err
		}
		_, err = tx.Model(&entity.BranchRefDraftEntity{}).
			Where("project_id = ?", projectId).
			Where("branch_name = ?", branchName).
			Delete()
		return err
	})

	return err
}

// CreateBranchDraft inserts the draft, its contents and refs (all with
// DO NOTHING on conflict, so an existing draft is left untouched), then
// normalizes folder membership: plain files nested under a folder entry are
// flagged as "in folder", nested folder entries are deleted, and folders
// containing an excluded file are dissolved (their files moved out, the
// folder entry deleted). All of this runs in one transaction.
func (d draftRepositoryImpl) CreateBranchDraft(ent entity.BranchDraftEntity, contents []*entity.ContentDraftEntity, refs []entity.BranchRefDraftEntity) error {
	err := d.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error {
		_, err := tx.Model(&ent).
			OnConflict("(project_id, branch_name) DO NOTHING").
			Insert()
		if err != nil {
			return err
		}
		if len(contents) > 0 {
			for _, content := range contents {
				_, err := tx.Model(content).
					OnConflict("(project_id, branch_name, file_id) DO NOTHING").
					Insert()
				if err != nil {
					return err
				}
			}
		}
		if len(refs) > 0 {
			_, err = tx.Model(&refs).
				OnConflict("(project_id, branch_name, reference_package_id, reference_version) DO NOTHING").
				Insert()
			if err != nil {
				return err
			}
		}

		if err != nil {
			return err
		}
		// Classify the incoming contents:
		//  - folders: entries that represent folders
		//  - excludedFiles: files marked with the "excluded" status
		//  - files: candidates for folder membership (skipping entries that
		//    are already folder members, publishable, or labeled)
		folders := make([]string, 0)
		files := make([]*entity.ContentDraftEntity, 0)
		excludedFiles := make([]string, 0)
		filesToMoveInFolder := make(map[string]bool, 0)
		filesToDelete := make(map[string]bool, 0)
		for index, file := range contents {
			if file.IsFolder {
				folders = append(folders, file.FileId)
			}
			if file.Status == string(view.StatusExcluded) {
				excludedFiles = append(excludedFiles, file.FileId)
			}
			if file.FromFolder || file.Publish || len(file.Labels) > 0 {
				continue
			}
			files = append(files, contents[index])
		}
		// Anything whose id is prefixed by a folder id belongs to that folder:
		// nested folders get deleted, plain files get the in-folder flag.
		for _, folder := range folders {
			for _, file := range files {
				if strings.HasPrefix(file.FileId, folder) && file.FileId != folder {
					if file.IsFolder {
						filesToDelete[file.FileId] = true
					} else {
						filesToMoveInFolder[file.FileId] = true
					}
				}
			}
		}

		fileIdsToMoveInFolder := make([]string, 0)
		fileIdsToMoveFromFolder := make([]string, 0)
		fileIdsToDelete := make([]string, 0)
		for fileToUpdate := range filesToMoveInFolder {
			fileIdsToMoveInFolder = append(fileIdsToMoveInFolder, fileToUpdate)
		}
		// An excluded file dissolves its containing folder: every file of
		// that folder is moved out and the folder entry itself is deleted.
		// findFolderForFileEnts / findAllFilesForFolderEnts are package-level
		// helpers defined elsewhere in this file.
		for _, excludedFileId := range excludedFiles {
			folderForExcludedFile := findFolderForFileEnts(excludedFileId, contents)
			if folderForExcludedFile == "" {
				continue
			}
			fileIdsToMoveFromFolder = append(fileIdsToMoveFromFolder, findAllFilesForFolderEnts(folderForExcludedFile, contents)...)
			filesToDelete[folderForExcludedFile] = true
		}
		for fileToDelete := range filesToDelete {
			fileIdsToDelete = append(fileIdsToDelete, fileToDelete)
		}

		err = d.UpdateFolderContents(ent.ProjectId, ent.BranchName, fileIdsToDelete, fileIdsToMoveInFolder, fileIdsToMoveFromFolder)
		return err
	})
	return err
}

// CreateContent inserts a single draft file row.
func (d draftRepositoryImpl) CreateContent(content *entity.ContentDraftEntity) error {
	_, err := d.cp.GetConnection().Model(content).Insert()
	if err != nil {
		return err
	}
	return nil
}

// SetContents upserts the given draft files (insert, or full update on
// conflict) within a single transaction. No-op for empty input.
func (d draftRepositoryImpl) SetContents(contents []*entity.ContentDraftEntity) error {
	if len(contents) == 0 {
		return nil
	}
	ctx := context.Background()
	err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		for _, content := range contents {
			_, err := tx.Model(content).
				OnConflict("(project_id, branch_name, file_id) DO UPDATE").
				Insert()
			if err != nil {
				return err
			}
		}
		return nil
	})
	if err != nil {
		return err
	}
	return nil
}

// GetContent returns the draft file without its data blob (folders are
// filtered out), or (nil, nil) when absent.
func (d draftRepositoryImpl) GetContent(projectId string, branchName string, fileId string) (*entity.ContentDraftEntity, error) {
	result := new(entity.ContentDraftEntity)
	err := d.cp.GetConnection().Model(result).
		ExcludeColumn("data").
		Where("project_id = ?", projectId).
		Where("branch_name = ?", branchName).
		Where("file_id = ?", fileId).
		Where("is_folder = false").
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetContentWithData returns the draft file including its data blob
// (folders are filtered out), or (nil, nil) when absent.
func (d draftRepositoryImpl) GetContentWithData(projectId string, branchName string, fileId string) (*entity.ContentDraftEntity, error) {
	result := new(entity.ContentDraftEntity)
	err := d.cp.GetConnection().Model(result).
		Where("project_id = ?", projectId).
		Where("branch_name = ?", branchName).
		Where("file_id = ?", fileId).
		Where("is_folder = false").
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// UpdateContent updates all columns of the draft file identified by the
// entity's project/branch/file keys.
func (d draftRepositoryImpl) UpdateContent(content *entity.ContentDraftEntity) error {
	_, err := d.cp.GetConnection().Model(content).
		Where("project_id = ?", content.ProjectId).
		Where("branch_name = ?", content.BranchName).
		Where("file_id = ?", content.FileId).
		Update()
	if err != nil {
		return err
	}
	return nil
}

// UpdateContentMetadata updates only the publish, labels and from_folder
// columns of the draft file.
func (d draftRepositoryImpl) UpdateContentMetadata(content *entity.ContentDraftEntity) error {
	_, err := d.cp.GetConnection().Model(content).
		Column("publish", "labels", "from_folder").
		Where("project_id = ?", content.ProjectId).
		Where("branch_name = ?", content.BranchName).
		Where("file_id = ?", content.FileId).
		Update()
	if err != nil {
		return err
	}
	return nil
}

// UpdateContents updates all columns for each given draft file, in one
// transaction.
func (d draftRepositoryImpl) UpdateContents(contents []*entity.ContentDraftEntity) error {

	ctx := context.Background()
	err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		for _, content := range contents {
			_, err := tx.Model(content).
				Where("project_id = ?", content.ProjectId).
				Where("branch_name = ?", content.BranchName).
				Where("file_id = ?", content.FileId).
				Update()
			if err != nil {
				return err
			}

		}
		return nil
	})

	if err != nil {
		return err
	}
	return nil
}

// UpdateContentsMetadata updates only the metadata columns
// (publish, labels, from_folder) for each given draft file, in one transaction.
func (d draftRepositoryImpl) UpdateContentsMetadata(contents []*entity.ContentDraftEntity) error {

	ctx := context.Background()
	err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		for _, content := range contents {
			_, err := tx.Model(content).
				Column("publish", "labels", "from_folder").
				Where("project_id = ?", content.ProjectId).
				Where("branch_name = ?", content.BranchName).
				Where("file_id = ?", content.FileId).
				Update()
			if err != nil {
				return err
			}

		}
		return nil
	})

	if err != nil {
		return err
	}
	return nil
}
+ Update() + if err != nil { + return err + } + + } + return nil + }) + + if err != nil { + return err + } + return nil +} + +func (d draftRepositoryImpl) UpdateContentsConflicts(projectId string, branchName string, fileConflicts []view.FileConflict) error { + ctx := context.Background() + err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + for _, conflict := range fileConflicts { + contentEnt := &entity.ContentDraftEntity{ConflictedBlobId: conflict.ConflictedBlobId} + if conflict.ConflictedFileId != nil { + contentEnt.ConflictedFileId = *conflict.ConflictedFileId + } + _, err := tx.Model(contentEnt). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Where("file_id = ?", conflict.FileId). + Set("conflicted_blob_id = ?conflicted_blob_id"). + Set("conflicted_file_id = ?conflicted_file_id"). + Update() + if err != nil { + return err + } + + } + return nil + }) + + if err != nil { + return err + } + return nil +} + +func (d draftRepositoryImpl) UpdateContentData(projectId string, branchName string, fileId string, data []byte, mediaType string, status string, blobId string) error { + _, err := d.cp.GetConnection().Model(&entity.ContentDraftEntity{Data: data, MediaType: mediaType, Status: status, BlobId: blobId}). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Where("file_id = ?", fileId). + Set("data = ?data"). + Set("media_type = ?media_type"). + Set("status = ?status"). + Set("blob_id = ?blob_id"). + Update() + if err != nil { + return err + } + return nil +} + +func (d draftRepositoryImpl) UpdateContentStatus(projectId string, branchName string, fileId string, status string, lastStatus string) error { + _, err := d.cp.GetConnection().Model(&entity.ContentDraftEntity{Status: status, LastStatus: lastStatus}). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Where("file_id = ?", fileId). + Set("status = ?status"). + Set("last_status = ?last_status"). 
+		Update()
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// DeleteContent removes a single draft file row inside its own transaction.
+func (d draftRepositoryImpl) DeleteContent(projectId string, branchName string, fileId string) error {
+	ctx := context.Background()
+	return d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
+		return d.deleteContent(tx, projectId, branchName, fileId)
+	})
+}
+
+// deleteContent deletes a draft file row using the caller's transaction.
+// A missing row (pg.ErrNoRows) is treated as success.
+func (d draftRepositoryImpl) deleteContent(tx *pg.Tx, projectId string, branchName string, fileId string) error {
+	_, err := tx.Model(&entity.ContentDraftEntity{}).
+		Where("project_id = ?", projectId).
+		Where("branch_name = ?", branchName).
+		Where("file_id = ?", fileId).
+		Delete()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil
+		}
+		return err
+	}
+	return nil
+}
+
+// ReplaceContent atomically removes the draft row for oldFileId and upserts
+// newContent (insert-or-update on the (project_id, branch_name, file_id) key).
+func (d draftRepositoryImpl) ReplaceContent(projectId string, branchName string, oldFileId string, newContent *entity.ContentDraftEntity) error {
+	ctx := context.Background()
+	err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
+		err := d.deleteContent(tx, projectId, branchName, oldFileId)
+		if err != nil {
+			return err
+		}
+		_, err = tx.Model(newContent).
+			OnConflict("(project_id, branch_name, file_id) DO UPDATE").
+			Insert()
+		if err != nil {
+			return err
+		}
+
+		return nil
+	})
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// ContentExists reports whether a draft file row exists; only the file_id
+// column is fetched to keep the probe cheap.
+func (d draftRepositoryImpl) ContentExists(projectId string, branchName string, fileId string) (bool, error) {
+	result := new(entity.ContentDraftEntity)
+	err := d.cp.GetConnection().Model(result).
+		Column("file_id").
+		Where("project_id = ?", projectId).
+		Where("branch_name = ?", branchName).
+		Where("file_id = ?", fileId).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return false, nil
+		}
+		return false, err
+	}
+	return true, nil
+}
+
+// GetContents lists all draft file rows of a branch ordered by index; the
+// (potentially large) data column is excluded.
+func (d draftRepositoryImpl) GetContents(projectId string, branchName string) ([]entity.ContentDraftEntity, error) {
+	var result []entity.ContentDraftEntity
+
+	err := d.cp.GetConnection().Model(&result).
+		ExcludeColumn("data").
+		Where("project_id = ?", projectId).
+		Where("branch_name = ?", branchName).
+		Order("index ASC").
+		Select()
+	if err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+// CreateRef inserts a new branch reference draft row.
+func (d draftRepositoryImpl) CreateRef(ref *entity.BranchRefDraftEntity) error {
+	_, err := d.cp.GetConnection().Model(ref).Insert()
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetRef returns the branch reference draft identified by project, branch,
+// referenced package and version, or (nil, nil) when it does not exist.
+func (d draftRepositoryImpl) GetRef(projectId string, branchName string, refProjectId string, refVersion string) (*entity.BranchRefDraftEntity, error) {
+	result := new(entity.BranchRefDraftEntity)
+	err := d.cp.GetConnection().Model(result).
+		Where("project_id = ?", projectId).
+		Where("branch_name = ?", branchName).
+		Where("reference_package_id = ?", refProjectId).
+		Where("reference_version = ?", refVersion).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+// DeleteRef removes a branch reference draft inside its own transaction.
+func (d draftRepositoryImpl) DeleteRef(projectId string, branchName string, refProjectId string, refVersion string) error {
+	ctx := context.Background()
+	return d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
+		return d.deleteRef(tx, projectId, branchName, refProjectId, refVersion)
+	})
+}
+
+// deleteRef deletes a branch reference draft using the caller's transaction.
+func (d draftRepositoryImpl) deleteRef(tx *pg.Tx, projectId string, branchName string, refProjectId string, refVersion string) error {
+	_, err := tx.Model(&entity.BranchRefDraftEntity{}).
+		Where("project_id = ?", projectId).
+		Where("branch_name = ?", branchName).
+		Where("reference_package_id = ?", refProjectId).
+		Where("reference_version = ?", refVersion).
+		Delete()
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// UpdateRef updates all columns of the branch reference draft row matching
+// the entity's key fields.
+func (d draftRepositoryImpl) UpdateRef(ref *entity.BranchRefDraftEntity) error {
+	_, err := d.cp.GetConnection().Model(ref).
+		Where("project_id = ?", ref.ProjectId).
+		Where("branch_name = ?", ref.BranchName).
+		Where("reference_package_id = ?", ref.RefPackageId).
+		Where("reference_version = ?", ref.RefVersion).
+ Update() + if err != nil { + return err + } + return nil +} + +func (d draftRepositoryImpl) ReplaceRef(projectId string, branchName string, refProjectId string, refVersion string, ref *entity.BranchRefDraftEntity) error { + ctx := context.Background() + err := d.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(ref).Insert() + if err != nil { + return err + } + err = d.deleteRef(tx, projectId, branchName, refProjectId, refVersion) + if err != nil { + return err + } + return nil + }) + if err != nil { + return err + } + return nil +} + +func (d draftRepositoryImpl) GetRefs(projectId string, branchName string) ([]entity.BranchRefDraftEntity, error) { + var result []entity.BranchRefDraftEntity + err := d.cp.GetConnection().Model(&result). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Order("reference_version ASC"). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (d draftRepositoryImpl) DraftExists(projectId string, branchName string) (bool, error) { + contentCount, err := d.cp.GetConnection().Model(&entity.ContentDraftEntity{}). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). + Count() + if err != nil { + return false, err + } + + refCount, err := d.cp.GetConnection().Model(&entity.BranchRefDraftEntity{}). + Where("project_id = ?", projectId). + Where("branch_name = ?", branchName). 
+		Count()
+	if err != nil {
+		return false, err
+	}
+
+	// Any draft file or draft reference means the draft exists.
+	return (contentCount + refCount) > 0, nil
+}
+
+// findFolderForFileEnts returns the file id of the first folder entry that is
+// a string prefix of fileId, or "" when none matches.
+// NOTE(review): plain prefix matching does not require a path separator after
+// the folder id, so folder "api" would also match "api2/spec.yaml" — confirm
+// that folder ids always end with "/".
+func findFolderForFileEnts(fileId string, allFiles []*entity.ContentDraftEntity) string {
+	for _, file := range allFiles {
+		if file.IsFolder && strings.HasPrefix(fileId, file.FileId) {
+			return file.FileId
+		}
+	}
+	return ""
+}
+
+// findAllFilesForFolderEnts lists ids of non-folder entries that were added
+// from a folder and whose id starts with folderFileId (same prefix caveat as
+// findFolderForFileEnts).
+func findAllFilesForFolderEnts(folderFileId string, allFiles []*entity.ContentDraftEntity) []string {
+	filesForFolder := make([]string, 0)
+	for _, file := range allFiles {
+		if !file.IsFolder && file.FromFolder && strings.HasPrefix(file.FileId, folderFileId) {
+			filesForFolder = append(filesForFolder, file.FileId)
+		}
+	}
+	return filesForFolder
+}
+
+// UpdateFolderContents applies a folder-level change to the branch draft in
+// one transaction: deletes obsolete files and flips the from_folder flag for
+// files moved into or out of a folder. Empty id slices are skipped.
+func (d draftRepositoryImpl) UpdateFolderContents(projectId string, branchName string, fileIdsToDelete []string, fileIdsToMoveInFolder []string, fileIdsToMoveFromFolder []string) error {
+	err := d.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error {
+		if len(fileIdsToDelete) > 0 {
+			_, err := tx.Model(&entity.ContentDraftEntity{}).
+				Where("project_id = ?", projectId).
+				Where("branch_name = ?", branchName).
+				Where("file_id in (?)", pg.In(fileIdsToDelete)).
+				Delete()
+			if err != nil {
+				return err
+			}
+		}
+
+		if len(fileIdsToMoveInFolder) > 0 {
+			_, err := tx.Model(&entity.ContentDraftEntity{}).
+				Where("project_id = ?", projectId).
+				Where("branch_name = ?", branchName).
+				Where("file_id in (?)", pg.In(fileIdsToMoveInFolder)).
+				Set("from_folder = ?", true).
+				Update()
+			if err != nil {
+				return err
+			}
+		}
+
+		if len(fileIdsToMoveFromFolder) > 0 {
+			_, err := tx.Model(&entity.ContentDraftEntity{}).
+				Where("project_id = ?", projectId).
+				Where("branch_name = ?", branchName).
+				Where("file_id in (?)", pg.In(fileIdsToMoveFromFolder)).
+				Set("from_folder = ?", false).
+			Update()
+			if err != nil {
+				return err
+			}
+		}
+
+		return nil
+	})
+	return err
+}
diff --git a/qubership-apihub-service/repository/FavoritesRepository.go b/qubership-apihub-service/repository/FavoritesRepository.go
new file mode 100644
index 0000000..c04adad
--- /dev/null
+++ b/qubership-apihub-service/repository/FavoritesRepository.go
@@ -0,0 +1,24 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+// FavoritesRepository persists per-user favorite flags for projects and packages.
+type FavoritesRepository interface {
+	AddProjectToFavorites(userId string, id string) error
+	AddPackageToFavorites(userId string, id string) error
+	RemoveProjectFromFavorites(userId string, id string) error
+	RemovePackageFromFavorites(userId string, id string) error
+	IsFavoriteProject(userId string, id string) (bool, error)
+	IsFavoritePackage(userId string, id string) (bool, error)
+}
diff --git a/qubership-apihub-service/repository/FavoritesRepositoryPG.go b/qubership-apihub-service/repository/FavoritesRepositoryPG.go
new file mode 100644
index 0000000..859c698
--- /dev/null
+++ b/qubership-apihub-service/repository/FavoritesRepositoryPG.go
@@ -0,0 +1,93 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+import (
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
+	"github.com/go-pg/pg/v10"
+)
+
+// NewFavoritesRepositoryPG creates a Postgres-backed FavoritesRepository.
+func NewFavoritesRepositoryPG(cp db.ConnectionProvider) (FavoritesRepository, error) {
+	return &favoritesRepositoryImpl{cp: cp}, nil
+}
+
+type favoritesRepositoryImpl struct {
+	cp db.ConnectionProvider
+}
+
+// AddProjectToFavorites marks a project as favorite for the user; adding the
+// same favorite twice is a no-op thanks to the upsert.
+func (f favoritesRepositoryImpl) AddProjectToFavorites(userId string, id string) error {
+	ent := &entity.FavoriteProjectEntity{UserId: userId, Id: id}
+	_, err := f.cp.GetConnection().Model(ent).
+		OnConflict("(user_id, project_id) DO UPDATE").
+		Set("user_id = EXCLUDED.user_id, project_id = EXCLUDED.project_id").
+		Insert()
+	return err
+}
+
+// AddPackageToFavorites marks a package as favorite for the user (idempotent upsert).
+func (f favoritesRepositoryImpl) AddPackageToFavorites(userId string, id string) error {
+	ent := &entity.FavoritePackageEntity{UserId: userId, Id: id}
+	_, err := f.cp.GetConnection().Model(ent).
+		OnConflict("(user_id, package_id) DO UPDATE").
+		Set("user_id = EXCLUDED.user_id, package_id = EXCLUDED.package_id").
+		Insert()
+	return err
+}
+
+// RemoveProjectFromFavorites deletes the user's favorite mark for a project.
+func (f favoritesRepositoryImpl) RemoveProjectFromFavorites(userId string, id string) error {
+	_, err := f.cp.GetConnection().Model(&entity.FavoriteProjectEntity{}).
+		Where("user_id = ?", userId).
+		Where("project_id = ?", id).
+		Delete()
+	return err
+}
+
+// RemovePackageFromFavorites deletes the user's favorite mark for a package.
+func (f favoritesRepositoryImpl) RemovePackageFromFavorites(userId string, id string) error {
+	_, err := f.cp.GetConnection().Model(&entity.FavoritePackageEntity{}).
+		Where("user_id = ?", userId).
+		Where("package_id = ?", id).
+		Delete()
+	return err
+}
+
+// IsFavoriteProject reports whether the user has marked the project as favorite.
+func (f favoritesRepositoryImpl) IsFavoriteProject(userId string, id string) (bool, error) {
+	result := new(entity.FavoriteProjectEntity)
+	err := f.cp.GetConnection().Model(result).
+		Where("user_id = ?", userId).
+		Where("project_id = ?", id).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return false, nil
+		}
+		return false, err
+	}
+	return true, nil
+}
+
+// IsFavoritePackage reports whether the user has marked the package as favorite.
+func (f favoritesRepositoryImpl) IsFavoritePackage(userId string, id string) (bool, error) {
+	result := new(entity.FavoritePackageEntity)
+	err := f.cp.GetConnection().Model(result).
+		Where("user_id = ?", userId).
+		Where("package_id = ?", id).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return false, nil
+		}
+		return false, err
+	}
+	return true, nil
+}
diff --git a/qubership-apihub-service/repository/GitIntegrationRepository.go b/qubership-apihub-service/repository/GitIntegrationRepository.go
new file mode 100644
index 0000000..1e3758b
--- /dev/null
+++ b/qubership-apihub-service/repository/GitIntegrationRepository.go
@@ -0,0 +1,27 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+import (
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
+)
+
+// GitIntegrationRepository stores per-user API keys for git integrations and
+// tracks failed token refresh attempts.
+type GitIntegrationRepository interface {
+	SaveUserApiKey(apiKeyEntity entity.ApiKeyEntity) (*entity.ApiKeyEntity, error)
+	DeleteUserApiKey(integration view.GitIntegrationType, userId string) error
+	GetUserApiKey(integration view.GitIntegrationType, userId string) (*entity.ApiKeyEntity, error)
+	AddFailedRefreshAttempt(integration view.GitIntegrationType, userId string) error
+}
diff --git a/qubership-apihub-service/repository/GitIntegrationRepositoryPG.go b/qubership-apihub-service/repository/GitIntegrationRepositoryPG.go
new file mode 100644
index 0000000..280c9de
--- /dev/null
+++ b/qubership-apihub-service/repository/GitIntegrationRepositoryPG.go
@@ -0,0 +1,73 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+import (
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
+	"github.com/go-pg/pg/v10"
+)
+
+// NewGitIntegrationRepositoryPG creates a Postgres-backed GitIntegrationRepository.
+func NewGitIntegrationRepositoryPG(cp db.ConnectionProvider) (GitIntegrationRepository, error) {
+	return &gitIntegrationRepositoryImpl{cp: cp}, nil
+}
+
+type gitIntegrationRepositoryImpl struct {
+	cp db.ConnectionProvider
+}
+
+// SaveUserApiKey upserts the user's API key for an integration type and
+// returns the stored entity.
+func (i gitIntegrationRepositoryImpl) SaveUserApiKey(apiKeyEntity entity.ApiKeyEntity) (*entity.ApiKeyEntity, error) {
+	_, err := i.cp.GetConnection().Model(&apiKeyEntity).
+		OnConflict("(\"user_id\", \"integration_type\") DO UPDATE").
+		Insert()
+	return &apiKeyEntity, err
+}
+
+// DeleteUserApiKey removes the user's API key for an integration type.
+func (i gitIntegrationRepositoryImpl) DeleteUserApiKey(integration view.GitIntegrationType, userId string) error {
+	_, err := i.cp.GetConnection().Model(&entity.ApiKeyEntity{}).
+		Where("integration_type = ?", integration).
+		Where("user_id = ?", userId).
+		Delete()
+	return err
+}
+
+// GetUserApiKey returns the stored API key or (nil, nil) when none exists.
+func (i gitIntegrationRepositoryImpl) GetUserApiKey(integration view.GitIntegrationType, userId string) (*entity.ApiKeyEntity, error) {
+	result := new(entity.ApiKeyEntity)
+	err := i.cp.GetConnection().Model(result).
+		Where("user_id = ?", userId).
+		Where("integration_type = ?", integration).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+// AddFailedRefreshAttempt increments the failed token refresh counter for the
+// user's integration key; the increment happens server-side in SQL.
+func (i gitIntegrationRepositoryImpl) AddFailedRefreshAttempt(integration view.GitIntegrationType, userId string) error {
+	result := new(entity.ApiKeyEntity)
+	_, err := i.cp.GetConnection().Model(result).
+		Where("user_id = ?", userId).
+		Where("integration_type = ?", integration).
+		Set("failed_refresh_attempts = failed_refresh_attempts + 1").
+		Update()
+	if err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/qubership-apihub-service/repository/MetricsRepository.go b/qubership-apihub-service/repository/MetricsRepository.go
new file mode 100644
index 0000000..e3fc8ee
--- /dev/null
+++ b/qubership-apihub-service/repository/MetricsRepository.go
@@ -0,0 +1,100 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+import (
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
+)
+
+// MetricsRepository refreshes build-related Prometheus gauges from the database.
+type MetricsRepository interface {
+	StartGetMetricsProcess() error
+}
+
+// NewMetricsRepository creates a MetricsRepository backed by the given
+// connection provider.
+func NewMetricsRepository(cp db.ConnectionProvider) MetricsRepository {
+	return &metricsRepositoryImpl{
+		cp: cp,
+	}
+}
+
+type metricsRepositoryImpl struct {
+	cp db.ConnectionProvider
+}
+
+// StartGetMetricsProcess recomputes all build gauges (failure count, queue
+// sizes, build time stats, retries) over roughly the last 24 hours; the first
+// query error aborts the refresh.
+func (m metricsRepositoryImpl) StartGetMetricsProcess() error {
+	errorBuildsCount, err := m.getBuildCountByStatus(string(view.StatusError))
+	if err != nil {
+		return err
+	}
+	metrics.FailedBuildCount.WithLabelValues().Set(float64(errorBuildsCount.BuildCount))
+
+	buildNoneStatusQueueSize, err := m.getBuildCountByStatus(string(view.StatusNotStarted))
+	if err != nil {
+		return err
+	}
+
+	metrics.BuildNoneStatusQueueSize.WithLabelValues().Set(float64(buildNoneStatusQueueSize.BuildCount))
+
+	buildRunningStatusQueueSize, err := m.getBuildCountByStatus(string(view.StatusRunning))
+	if err != nil {
+		return err
+	}
+	metrics.BuildRunningStatusQueueSize.WithLabelValues().Set(float64(buildRunningStatusQueueSize.BuildCount))
+
+	buildMaxAvgTimeMetrics, err := m.getBuildTimeMetrics()
+	if err != nil {
+		return err
+	}
+	metrics.MaxBuildTime.WithLabelValues().Set(float64(buildMaxAvgTimeMetrics.MaxBuildTime))
+	metrics.AvgBuildTime.WithLabelValues().Set(float64(buildMaxAvgTimeMetrics.AvgBuildTime))
+
+	buildRetriesCount, err := m.getBuildRetriesCount()
+	if err != nil {
+		return err
+	}
+	metrics.NumberOfBuildRetries.WithLabelValues().Set(float64(buildRetriesCount.RetriesCount))
+	return nil
+}
+
+// getBuildCountByStatus counts builds with the given status that were active
+// within the last day.
+func (m metricsRepositoryImpl) getBuildCountByStatus(status string) (*entity.BuildByStatusCountEntity, error) {
+	result := new(entity.BuildByStatusCountEntity)
+	query := `select count(build_id) as build_count from build where status = ? 
+and last_active >= now() - interval '1 day'`
+	_, err := m.cp.GetConnection().QueryOne(result, query, status)
+	if err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+// getBuildTimeMetrics returns the max and average build duration (in seconds)
+// of completed builds over the last day.
+func (m metricsRepositoryImpl) getBuildTimeMetrics() (*entity.BuildTimeMetricsEntity, error) {
+	result := new(entity.BuildTimeMetricsEntity)
+	query := `select EXTRACT(EPOCH FROM max(last_active - created_at))::int as max_build_time, EXTRACT(EPOCH FROM avg(last_active - created_at))::int as avg_build_time from build where status = 'complete' and last_active >= now() - interval '1 day'`
+	_, err := m.cp.GetConnection().QueryOne(result, query)
+	if err != nil {
+		return nil, err
+	}
+	return result, nil
+}
+
+// getBuildRetriesCount sums build restart counts over the last day.
+func (m metricsRepositoryImpl) getBuildRetriesCount() (*entity.BuildRetriesCountEntity, error) {
+	result := new(entity.BuildRetriesCountEntity)
+	query := `select sum(restart_count) as retries_count from build where last_active >= now() - interval '1 day'`
+	_, err := m.cp.GetConnection().QueryOne(result, query)
+	if err != nil {
+		return nil, err
+	}
+	return result, nil
+}
diff --git a/qubership-apihub-service/repository/OperationRepository.go b/qubership-apihub-service/repository/OperationRepository.go
new file mode 100644
index 0000000..6d142d9
--- /dev/null
+++ b/qubership-apihub-service/repository/OperationRepository.go
@@ -0,0 +1,1789 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package repository
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils"
+	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
+	"github.com/go-pg/pg/v10"
+	"github.com/go-pg/pg/v10/orm"
+)
+
+// OperationRepository provides read and group-management access to published
+// API operations, their changelogs, search and deprecation data.
+type OperationRepository interface {
+	GetOperationsByIds(packageId string, version string, revision int, operationIds []string) ([]entity.OperationEntity, error)
+	GetOperations(packageId string, version string, revision int, operationType string, skipRefs bool, searchReq view.OperationListReq) ([]entity.OperationRichEntity, error)
+	GetOperationById(packageId string, version string, revision int, operationType string, operationId string) (*entity.OperationRichEntity, error)
+	GetOperationsTags(searchQuery entity.OperationTagsSearchQueryEntity, skipRefs bool) ([]string, error)
+	GetAllOperations(packageId string, version string, revision int) ([]entity.OperationEntity, error)
+	GetOperationChanges(comparisonId string, operationId string, severities []string) (*entity.OperationComparisonEntity, error)
+	GetChangelog_deprecated(searchQuery entity.ChangelogSearchQueryEntity) ([]entity.OperationComparisonChangelogEntity_deprecated, error)
+	GetChangelog(searchQuery entity.ChangelogSearchQueryEntity) ([]entity.OperationComparisonChangelogEntity, error)
+	SearchForOperations_deprecated(searchQuery *entity.OperationSearchQuery) ([]entity.OperationSearchResult_deprecated, error)
+	SearchForOperations(searchQuery *entity.OperationSearchQuery) ([]entity.OperationSearchResult, error)
+	GetOperationsTypeCount(packageId string, version string, revision int) ([]entity.OperationsTypeCountEntity, error)
+	GetOperationsTypeDataHashes(packageId string, version string, revision int) ([]entity.OperationsTypeDataHashEntity, error)
+	GetOperationDeprecatedItems(packageId string, version string, revision int, operationType string, operationId string) (*entity.OperationRichEntity, error)
+	GetDeprecatedOperationsSummary(packageId string, version string, revision int) ([]entity.DeprecatedOperationsSummaryEntity, error)
+	GetDeprecatedOperationsRefsSummary(packageId string, version string, revision int) ([]entity.DeprecatedOperationsSummaryEntity, error)
+	GetDeprecatedOperations(packageId string, version string, revision int, operationType string, searchReq view.DeprecatedOperationListReq) ([]entity.OperationRichEntity, error)
+
+	AddOperationGroupHistory(ent *entity.OperationGroupHistoryEntity) error
+	CreateOperationGroup(ent *entity.OperationGroupEntity, templateEntity *entity.OperationGroupTemplateEntity) error
+	DeleteOperationGroup(ent *entity.OperationGroupEntity) error
+	ReplaceOperationGroup(oldGroupEntity *entity.OperationGroupEntity, newGroupEntity *entity.OperationGroupEntity, operationEntities []entity.GroupedOperationEntity, newTemplateEntity *entity.OperationGroupTemplateEntity) error
+	UpdateOperationGroup(oldGroupEntity *entity.OperationGroupEntity, newGroupEntity *entity.OperationGroupEntity, newTemplateEntity *entity.OperationGroupTemplateEntity, newGroupedOperations *[]entity.GroupedOperationEntity) error
+	GetOperationGroup(packageId string, version string, revision int, apiType string, groupName string) (*entity.OperationGroupEntity, error)
+	GetOperationGroupTemplateFile(packageId string, version string, revision int, apiType string, groupName string) (*entity.OperationGroupTemplateFileEntity, error)
+	CalculateOperationGroups(packageId string, version string, revision int, groupingPrefix string) ([]string, error)
+	GetVersionOperationGroups(packageId string, version string, revision int) ([]entity.OperationGroupCountEntity, error)
+	GetGroupedOperations(packageId string, version string, revision int, operationType string, groupName string, searchReq view.OperationListReq) ([]entity.OperationRichEntity, error)
+	GetOperationsByModelHash(packageId string, version string, revision int, apiType string, modelHash string) ([]entity.OperationModelsEntity, error)
+	GetOperationsByPathAndMethod(packageId string, version string, revision int, apiType string, path string, method string) ([]string, error)
+}
+
+// NewOperationRepository creates an OperationRepository backed by the given
+// connection provider.
+func NewOperationRepository(cp db.ConnectionProvider) OperationRepository {
+	return &operationRepositoryImpl{cp: cp}
+}
+
+type operationRepositoryImpl struct {
+	cp db.ConnectionProvider
+}
+
+// GetOperationsByIds fetches operations of a package version revision by id;
+// an empty id list short-circuits to (nil, nil) without touching the database.
+func (o operationRepositoryImpl) GetOperationsByIds(packageId string, version string, revision int, operationIds []string) ([]entity.OperationEntity, error) {
+	if len(operationIds) == 0 {
+		return nil, nil
+	}
+	var result []entity.OperationEntity
+	err := o.cp.GetConnection().Model(&result).
+		Where("package_id = ?", packageId).
+		Where("version = ?", version).
+		Where("revision = ?", revision).
+		Where("operation_id in (?)", pg.In(operationIds)).
+		Select()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+// GetOperationById returns a single operation of the given type together with
+// its data blob (left-joined from operation_data), or (nil, nil) when absent.
+func (o operationRepositoryImpl) GetOperationById(packageId string, version string, revision int, operationType string, operationId string) (*entity.OperationRichEntity, error) {
+	result := new(entity.OperationRichEntity)
+	err := o.cp.GetConnection().Model(result).
+		ColumnExpr("operation.*").
+		Where("package_id = ?", packageId).
+		Where("version = ?", version).
+		Where("revision = ?", revision).
+		Where("type = ?", operationType).
+		Where("operation_id = ?", operationId).
+		Join("LEFT JOIN operation_data as op_data").
+		JoinOn("operation.data_hash = op_data.data_hash").
+		ColumnExpr("op_data.data").
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+// GetOperationDeprecatedItems returns only the deprecated_items column of a
+// single operation, or (nil, nil) when the operation does not exist.
+func (o operationRepositoryImpl) GetOperationDeprecatedItems(packageId string, version string, revision int, operationType string, operationId string) (*entity.OperationRichEntity, error) {
+	result := new(entity.OperationRichEntity)
+	err := o.cp.GetConnection().Model(result).
+		ColumnExpr("operation.deprecated_items").
+		Where("package_id = ?", packageId).
+		Where("version = ?", version).
+		Where("revision = ?", revision).
+		Where("type = ?", operationType).
+		Where("operation_id = ?", operationId).
+		First()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return result, nil
+}
+
+// GetOperations lists operations of a version revision, optionally including
+// operations of non-excluded referenced versions (skipRefs=false) and applying
+// the paging/filter options from searchReq.
+func (o operationRepositoryImpl) GetOperations(packageId string, version string, revision int, operationType string, skipRefs bool, searchReq view.OperationListReq) ([]entity.OperationRichEntity, error) {
+	var result []entity.OperationRichEntity
+	query := o.cp.GetConnection().Model(&result).
+		ColumnExpr("operation.*")
+
+	if !skipRefs {
+		// Resolve the version itself plus all non-excluded, non-deleted
+		// referenced versions, then join operations against that set.
+		query.Join(`inner join
+		(with refs as(
+			select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision
+			from published_version_reference s
+			inner join published_version pv
+			on pv.package_id = s.reference_id
+			and pv.version = s.reference_version
+			and pv.revision = s.reference_revision
+			and pv.deleted_at is null
+			where s.package_id = ?
+			and s.version = ?
+			and s.revision = ?
+			and s.excluded = false
+		)
+		select package_id, version, revision
+		from refs
+		union
+		select ? as package_id, ? as version, ? as revision
+		) refs`, packageId, version, revision, packageId, version, revision)
+		query.JoinOn("operation.package_id = refs.package_id").
+			JoinOn("operation.version = refs.version").
+			JoinOn("operation.revision = refs.revision")
+
+		if searchReq.RefPackageId != "" {
+			query.JoinOn("refs.package_id = ?", searchReq.RefPackageId)
+		}
+	} else {
+		query.Where("package_id = ?", packageId).
+			Where("version = ?", version).
+			Where("revision = ?", revision)
+	}
+
+	if searchReq.EmptyGroup {
+		//todo try to replace this 'not in' condition with join
+		query.Where(`operation.operation_id not in (
+			select operation_id from grouped_operation go
+			inner join operation_group og
+			on go.group_id = og.group_id
+			and og.package_id = ?
+			and og.version = ?
+			and og.revision = ?
+			and og.api_type = operation.type
+			where go.package_id = operation.package_id
+			and go.version = operation.version
+			and go.revision = operation.revision
+			)`, packageId, version, revision)
+	} else if searchReq.Group != "" {
+		query.Join(`inner join operation_group og`).
+			JoinOn("og.package_id = ?", packageId).
+			JoinOn("og.version = ?", version).
+			JoinOn("og.revision = ?", revision).
+			JoinOn("og.api_type = operation.type").
+			JoinOn("og.group_name = ?", searchReq.Group).
+			Join("inner join grouped_operation go").
+			JoinOn("go.group_id = og.group_id").
+			JoinOn("go.package_id = operation.package_id").
+			JoinOn("go.version = operation.version").
+			JoinOn("go.revision = operation.revision").
+			JoinOn("go.operation_id = operation.operation_id")
+	}
+
+	query.Where("operation.type = ?", operationType)
+
+	if searchReq.IncludeData {
+		query.Join("LEFT JOIN operation_data as op_data").
+			JoinOn("operation.data_hash = op_data.data_hash").
+			ColumnExpr("op_data.data")
+	}
+	// Deterministic paging order across the version and its references.
+	query.Order("operation.package_id",
+		"operation.version",
+		"operation.revision",
+		"operation_id ASC").
+		Offset(searchReq.Limit * searchReq.Page).
+		Limit(searchReq.Limit)
+
+	if searchReq.CustomTagKey != "" && searchReq.CustomTagValue != "" {
+		query.Where("exists(select 1 from jsonb_each_text(operation.custom_tags) where key = ? and value = ?)", searchReq.CustomTagKey, searchReq.CustomTagValue)
+	} else if searchReq.TextFilter != "" {
+		// Escape user text for ILIKE and match against title, path and method.
+		searchReq.TextFilter = "%" + utils.LikeEscaped(searchReq.TextFilter) + "%"
+		query.WhereGroup(func(q *pg.Query) (*pg.Query, error) {
+			q = q.WhereOr("operation.title ilike ?", searchReq.TextFilter).
+				WhereOr("operation.metadata->>? ilike ?", "path", searchReq.TextFilter).
+				WhereOr("operation.metadata->>? ilike ?", "method", searchReq.TextFilter)
+			return q, nil
+		})
+	}
+
+	if searchReq.Kind != "" {
+		query.Where("kind = ?", searchReq.Kind)
+	}
+	if searchReq.ApiAudience != "" {
+		query.Where("api_audience = ?", searchReq.ApiAudience)
+	}
+
+	if searchReq.Tag != "" {
+		searchReq.Tag = utils.LikeEscaped(searchReq.Tag)
+		query.Where(`exists(
+			select 1 from jsonb_array_elements(operation.metadata -> 'tags') a
+			where replace(a.value::text,'"','') like ?)`, searchReq.Tag)
+	}
+
+	if searchReq.EmptyTag {
+		query.Where(`not exists(select 1 from jsonb_array_elements(operation.metadata -> 'tags') a
+		where a.value != '""') `)
+	}
+
+	if searchReq.Deprecated != nil {
+		query.Where("operation.deprecated = ?", *searchReq.Deprecated)
+	}
+
+	if len(searchReq.Ids) > 0 {
+		query.Where("operation.operation_id in (?)", pg.In(searchReq.Ids))
+	}
+
+	if len(searchReq.HashList) > 0 {
+		query.Where("operation.data_hash in (?)", pg.In(searchReq.HashList))
+	}
+	if searchReq.DocumentSlug != "" {
+		query.Join("inner join published_version_revision_content as pvrc").
+			JoinOn("operation.operation_id = any(pvrc.operation_ids)").
+			JoinOn("pvrc.slug = ?", searchReq.DocumentSlug).
+			JoinOn("operation.package_id = pvrc.package_id").
+			JoinOn("operation.version = pvrc.version").
+ JoinOn("operation.revision = pvrc.revision") + } + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetDeprecatedOperations(packageId string, version string, revision int, operationType string, searchReq view.DeprecatedOperationListReq) ([]entity.OperationRichEntity, error) { + var result []entity.OperationRichEntity + query := o.cp.GetConnection().Model(&result). + ColumnExpr("operation.*") + + query.Join(`inner join + (with refs as( + select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision + from published_version_reference s + inner join published_version pv + on pv.package_id = s.reference_id + and pv.version = s.reference_version + and pv.revision = s.reference_revision + and pv.deleted_at is null + where s.package_id = ? + and s.version = ? + and s.revision = ? + and s.excluded = false + ) + select package_id, version, revision + from refs + union + select ? as package_id, ? as version, ? as revision + ) refs`, packageId, version, revision, packageId, version, revision) + query.JoinOn("operation.package_id = refs.package_id"). + JoinOn("operation.version = refs.version"). + JoinOn("operation.revision = refs.revision") + + if searchReq.RefPackageId != "" { + query.JoinOn("refs.package_id = ?", searchReq.RefPackageId) + } + + query.Where("operation.type = ?", operationType) + + query.Where(`((operation.deprecated_items is not null and jsonb_typeof(operation.deprecated_items) = 'array' and jsonb_array_length(operation.deprecated_items) != 0) + or operation.deprecated = true)`) + + query.Order("operation.package_id", + "operation.version", + "operation.revision", + "operation_id ASC"). + Offset(searchReq.Limit * searchReq.Page). 
+ Limit(searchReq.Limit) + + if searchReq.TextFilter != "" { + searchReq.TextFilter = "%" + utils.LikeEscaped(searchReq.TextFilter) + "%" + query.WhereGroup(func(q *pg.Query) (*pg.Query, error) { + q = q.WhereOr("operation.title ilike ?", searchReq.TextFilter). + WhereOr("operation.metadata->>? ilike ?", "path", searchReq.TextFilter). + WhereOr("operation.metadata->>? ilike ?", "method", searchReq.TextFilter) + return q, nil + }) + } + if searchReq.Kind != "" { + query.Where("kind = ?", searchReq.Kind) + } + if searchReq.ApiAudience != "" { + query.Where("api_audience = ?", searchReq.ApiAudience) + } + + if len(searchReq.Tags) != 0 { + query.Where(`exists( + select 1 from jsonb_array_elements(operation.metadata -> 'tags') a + where replace(a.value::text,'"','') = any(?))`, pg.Array(searchReq.Tags)) + } + if searchReq.EmptyTag { + query.Where(`not exists(select 1 from jsonb_array_elements(operation.metadata -> 'tags') a + where a.value != '""') `) + } + if searchReq.EmptyGroup { + //todo try to replace this 'not in' condition with join + query.Where(`operation.operation_id not in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? + and og.api_type = operation.type + where go.package_id = operation.package_id + and go.version = operation.version + and go.revision = operation.revision + )`, packageId, version, revision) + } else if searchReq.Group != "" { + query.Join(`inner join operation_group og`). + JoinOn("og.package_id = ?", packageId). + JoinOn("og.version = ?", version). + JoinOn("og.revision = ?", revision). + JoinOn("og.api_type = operation.type"). + JoinOn("og.group_name = ?", searchReq.Group). + Join("inner join grouped_operation go"). + JoinOn("go.group_id = og.group_id"). + JoinOn("go.package_id = operation.package_id"). + JoinOn("go.version = operation.version"). + JoinOn("go.revision = operation.revision"). 
+ JoinOn("go.operation_id = operation.operation_id") + } + + if len(searchReq.Ids) > 0 { + query.Where("operation.operation_id in (?)", pg.In(searchReq.Ids)) + } + + if searchReq.DocumentSlug != "" { + query.Join("inner join published_version_revision_content as pvrc"). + JoinOn("operation.operation_id = any(pvrc.operation_ids)"). + JoinOn("pvrc.slug = ?", searchReq.DocumentSlug). + JoinOn("operation.package_id = pvrc.package_id"). + JoinOn("operation.version = pvrc.version"). + JoinOn("operation.revision = pvrc.revision") + } + + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetOperationsTags(searchQuery entity.OperationTagsSearchQueryEntity, skipRefs bool) ([]string, error) { + type Tag struct { + Tag string `pg:"tag"` + } + var tags []Tag + + var query string + if !skipRefs { + query = ` + with ops as ( + select operation.* from operation + inner join + (with refs as( + select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision + from published_version_reference s + inner join published_version pv + on pv.package_id = s.reference_id + and pv.version = s.reference_version + and pv.revision = s.reference_revision + and pv.deleted_at is null + where s.package_id = ?package_id + and s.version = ?version + and s.revision = ?revision + and s.excluded = false + ) + select package_id, version, revision + from refs + union + select ?package_id as package_id, ?version as version, ?revision as revision + ) refs + on operation.package_id = refs.package_id + and operation.version = refs.version + and operation.revision = refs.revision + where operation.type = ?type + and (?kind = '' or operation.kind = ?kind) + and (?api_audience = '' or operation.api_audience = ?api_audience) + ) + select tag from + ( + (select '' as tag + from ops o where + ?text_filter = '' + and not exists(select 1 from jsonb_array_elements(o.metadata -> 'tags') a where a.value != '""') 
+ limit 1) + union + select distinct replace(a.value::text,'"','') as tag + from ops o, jsonb_array_elements(o.metadata -> 'tags') a + where (?text_filter = '' or replace(a.value::text,'"','') ilike ?text_filter) + ) t + order by tag asc + limit ?limit + offset ?offset;` + } else { + query = `select tag + from + ( + (select '' as tag + from operation o + where o.package_id = ?package_id + and o.version = ?version + and o.revision = ?revision + and o.type = ?type + and (?kind = '' or o.kind = ?kind) + and (?api_audience = '' or o.api_audience = ?api_audience) + and ?text_filter = '' + and not exists(select 1 from jsonb_array_elements(o.metadata -> 'tags') a where a.value != '""') + limit 1) + union + select distinct replace(a.value::text,'"','') as tag + from operation o, + jsonb_array_elements(o.metadata -> 'tags') a + where o.package_id = ?package_id + and o.version = ?version + and o.revision = ?revision + and o.type = ?type + and (?kind = '' or o.kind = ?kind) + and (?api_audience = '' or o.api_audience = ?api_audience) + and (?text_filter = '' or replace(a.value::text,'"','') ilike ?text_filter) + ) t + order by tag asc + limit ?limit + offset ?offset;` + } + + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + + _, err := o.cp.GetConnection().Model(&searchQuery).Query(&tags, query) + if err != nil { + return nil, err + } + + result := make([]string, 0) + + for _, t := range tags { + result = append(result, t.Tag) + } + return result, nil +} + +func (o operationRepositoryImpl) GetAllOperations(packageId string, version string, revision int) ([]entity.OperationEntity, error) { + var result []entity.OperationEntity + err := o.cp.GetConnection().Model(&result). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("revision = ?", revision). 
+ Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetOperationChanges(comparisonId string, operationId string, severities []string) (*entity.OperationComparisonEntity, error) { + result := new(entity.OperationComparisonEntity) + err := o.cp.GetConnection().Model(result). + Where("comparison_id = ?", comparisonId). + Where("operation_id = ?", operationId). + OrderExpr("data_hash, previous_data_hash"). + Limit(1). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetChangelog_deprecated(searchQuery entity.ChangelogSearchQueryEntity) ([]entity.OperationComparisonChangelogEntity_deprecated, error) { + var result []entity.OperationComparisonChangelogEntity_deprecated + + comparisonsQuery := o.cp.GetConnection().Model(&entity.OperationComparisonChangelogEntity_deprecated{}). + TableExpr("operation_comparison"). + ColumnExpr("case when data_hash is null then previous_package_id else package_id end operation_package_id"). + ColumnExpr("case when data_hash is null then previous_version else version end operation_version"). + ColumnExpr("case when data_hash is null then previous_revision else revision end operation_revision"). + ColumnExpr("operation_comparison.*"). + Where(`comparison_id in ( + select unnest(array_append(refs, ?)) id from version_comparison where (comparison_id = ?) + )`, searchQuery.ComparisonId, searchQuery.ComparisonId). + Where(`(? = '' or package_id = ? or previous_package_id = ?)`, searchQuery.RefPackageId, searchQuery.RefPackageId, searchQuery.RefPackageId) + + query := o.cp.GetConnection().Model(&result).With("comparisons", comparisonsQuery). + TableExpr("comparisons"). + ColumnExpr("operation_comparison.*"). + ColumnExpr("o.metadata"). + ColumnExpr("o.title"). + ColumnExpr("o.type"). 
+ ColumnExpr("o.kind") + + query.Join("inner join operation o"). + JoinOn("o.package_id = operation_comparison.operation_package_id"). + JoinOn("o.version = operation_comparison.operation_version"). + JoinOn("o.revision = operation_comparison.operation_revision"). + JoinOn("o.operation_id = operation_comparison.operation_id") + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + query.JoinOn("o.title ilike ? or o.metadata->>? ilike ? or o.metadata->>? ilike ?", searchQuery.TextFilter, "path", searchQuery.TextFilter, "method", searchQuery.TextFilter) + } + if searchQuery.ApiType != "" { + query.JoinOn("o.type = ?", searchQuery.ApiType) + } + if searchQuery.ApiKind != "" { + query.JoinOn("o.kind = ?", searchQuery.ApiKind) + } + if searchQuery.ApiAudience != "" { + query.JoinOn("o.api_audience = ?", searchQuery.ApiAudience) + } + if len(searchQuery.Tags) != 0 { + query.JoinOn(`exists( + select 1 from jsonb_array_elements(o.metadata -> 'tags') a + where replace(a.value::text,'"','') = any(?))`, pg.Array(searchQuery.Tags)) + } + if searchQuery.EmptyTag { + query.JoinOn(`not exists(select 1 from jsonb_array_elements(o.metadata -> 'tags') a + where a.value != '""') `) + } + + if searchQuery.EmptyGroup { + //this filter also excludes all deleted operations + query.Where(`operation_comparison.data_hash is not null and o.operation_id not in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? 
+ and og.api_type = o.type + where go.package_id = o.package_id + and go.version = o.version + and go.revision = o.revision)`, + searchQuery.GroupPackageId, searchQuery.GroupVersion, searchQuery.GroupRevision) + } else if searchQuery.Group != "" { + //this filter also excludes all deleted operations + query.Where(`operation_comparison.data_hash is not null and o.operation_id in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? + and og.group_name = ? + and og.api_type = o.type + where go.package_id = o.package_id + and go.version = o.version + and go.revision = o.revision)`, + searchQuery.GroupPackageId, searchQuery.GroupVersion, searchQuery.GroupRevision, searchQuery.Group) + } + + if len(searchQuery.Severities) > 0 { + query.WhereGroup(func(query *orm.Query) (*orm.Query, error) { + for _, severity := range searchQuery.Severities { + query.WhereOr("(changes_summary->?)::int>0", severity) + } + return query, nil + }) + } + + if searchQuery.DocumentSlug != "" { + query.Join("inner join published_version_revision_content as pvrc"). + JoinOn("o.operation_id = any(pvrc.operation_ids)"). + JoinOn("pvrc.slug = ?", searchQuery.DocumentSlug). + JoinOn("o.package_id = pvrc.package_id"). + JoinOn("o.version = pvrc.version"). 
+ JoinOn("o.revision = pvrc.revision") + } + + query.OrderExpr(`(operation_comparison.changes_summary -> 'breaking')::int > 0 DESC, +((operation_comparison.changes_summary -> 'deprecated')::int > 0 and +(operation_comparison.changes_summary -> 'breaking')::int = 0) DESC`, + ) + query.Order("o.package_id", + "o.version", + "o.revision", + "o.operation_id", + "o.data_hash ASC") + + if searchQuery.Limit > 0 { + query.Limit(searchQuery.Limit) + } + if searchQuery.Offset > 0 { + query.Offset(searchQuery.Offset) + } + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetChangelog(searchQuery entity.ChangelogSearchQueryEntity) ([]entity.OperationComparisonChangelogEntity, error) { + var result []entity.OperationComparisonChangelogEntity + + comparisonsQuery := o.cp.GetConnection().Model(&entity.OperationComparisonChangelogEntity{}). + TableExpr("operation_comparison"). + ColumnExpr("case when data_hash is null then previous_package_id else package_id end operation_package_id"). + ColumnExpr("case when data_hash is null then previous_version else version end operation_version"). + ColumnExpr("case when data_hash is null then previous_revision else revision end operation_revision"). + ColumnExpr("operation_comparison.*"). + Where(`comparison_id in ( + select unnest(array_append(refs, ?)) id from version_comparison where (comparison_id = ?) + )`, searchQuery.ComparisonId, searchQuery.ComparisonId). + Where(`(? = '' or package_id = ? or previous_package_id = ?)`, searchQuery.RefPackageId, searchQuery.RefPackageId, searchQuery.RefPackageId) + + query := o.cp.GetConnection().Model(&result).With("comparisons", comparisonsQuery). + TableExpr("comparisons"). + ColumnExpr("operation_comparison.*"). + ColumnExpr("o.metadata"). + ColumnExpr("curr_op.title title"). + ColumnExpr("prev_op.title previous_title"). + ColumnExpr("o.type"). + ColumnExpr("curr_op.kind kind"). + ColumnExpr("prev_op.kind previous_kind"). 
+ ColumnExpr("curr_op.api_audience api_audience"). + ColumnExpr("prev_op.api_audience previous_api_audience") + + query.Join("left join operation curr_op"). + JoinOn("curr_op.package_id = operation_comparison.package_id"). + JoinOn("curr_op.version = operation_comparison.version"). + JoinOn("curr_op.revision = operation_comparison.revision"). + JoinOn("curr_op.operation_id = operation_comparison.operation_id") + query.Join("left join operation prev_op"). + JoinOn("prev_op.package_id = operation_comparison.previous_package_id"). + JoinOn("prev_op.version = operation_comparison.previous_version"). + JoinOn("prev_op.revision = operation_comparison.previous_revision"). + JoinOn("prev_op.operation_id = operation_comparison.operation_id") + query.Join("inner join operation o"). + JoinOn("o.package_id = operation_comparison.operation_package_id"). + JoinOn("o.version = operation_comparison.operation_version"). + JoinOn("o.revision = operation_comparison.operation_revision"). + JoinOn("o.operation_id = operation_comparison.operation_id") + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + query.JoinOn("o.title ilike ? or o.metadata->>? ilike ? or o.metadata->>? 
ilike ?", searchQuery.TextFilter, "path", searchQuery.TextFilter, "method", searchQuery.TextFilter) + } + if searchQuery.ApiType != "" { + query.JoinOn("o.type = ?", searchQuery.ApiType) + } + if searchQuery.ApiKind != "" { + query.JoinOn("o.kind = ?", searchQuery.ApiKind) + } + if searchQuery.ApiAudience != "" { + query.JoinOn("o.api_audience = ?", searchQuery.ApiAudience) + } + if len(searchQuery.Tags) != 0 { + query.JoinOn(`exists( + select 1 from jsonb_array_elements(o.metadata -> 'tags') a + where replace(a.value::text,'"','') = any(?))`, pg.Array(searchQuery.Tags)) + } + if searchQuery.EmptyTag { + query.JoinOn(`not exists(select 1 from jsonb_array_elements(o.metadata -> 'tags') a + where a.value != '""') `) + } + + if searchQuery.EmptyGroup { + //this filter also excludes all deleted operations + query.Where(`operation_comparison.data_hash is not null and o.operation_id not in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? + and og.api_type = o.type + where go.package_id = o.package_id + and go.version = o.version + and go.revision = o.revision)`, + searchQuery.GroupPackageId, searchQuery.GroupVersion, searchQuery.GroupRevision) + } else if searchQuery.Group != "" { + //this filter also excludes all deleted operations + query.Where(`operation_comparison.data_hash is not null and o.operation_id in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? + and og.group_name = ? 
+ and og.api_type = o.type + where go.package_id = o.package_id + and go.version = o.version + and go.revision = o.revision)`, + searchQuery.GroupPackageId, searchQuery.GroupVersion, searchQuery.GroupRevision, searchQuery.Group) + } + + if len(searchQuery.Severities) > 0 { + query.WhereGroup(func(query *orm.Query) (*orm.Query, error) { + for _, severity := range searchQuery.Severities { + query.WhereOr("(changes_summary->?)::int>0", severity) + } + return query, nil + }) + } + + if searchQuery.DocumentSlug != "" { + query.Join("inner join published_version_revision_content as pvrc"). + JoinOn("o.operation_id = any(pvrc.operation_ids)"). + JoinOn("pvrc.slug = ?", searchQuery.DocumentSlug). + JoinOn("o.package_id = pvrc.package_id"). + JoinOn("o.version = pvrc.version"). + JoinOn("o.revision = pvrc.revision") + } + + query.OrderExpr(`(operation_comparison.changes_summary -> 'breaking')::int > 0 DESC, +((operation_comparison.changes_summary -> 'deprecated')::int > 0 and +(operation_comparison.changes_summary -> 'breaking')::int = 0) DESC`, + ) + query.Order("o.package_id", + "o.version", + "o.revision", + "o.operation_id", + "o.data_hash ASC") + + if searchQuery.Limit > 0 { + query.Limit(searchQuery.Limit) + } + if searchQuery.Offset > 0 { + query.Offset(searchQuery.Offset) + } + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +// deprecated +func (o operationRepositoryImpl) SearchForOperations_deprecated(searchQuery *entity.OperationSearchQuery) ([]entity.OperationSearchResult_deprecated, error) { + _, err := o.cp.GetConnection().Exec("select to_tsquery(?)", searchQuery.SearchString) + if err != nil { + return nil, fmt.Errorf("invalid search string: %v", err.Error()) + } + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + var result []entity.OperationSearchResult_deprecated + operationsSearchQuery := ` + with maxrev as + ( + select package_id, version, pg.name as package_name, max(revision) as 
revision + from published_version pv + inner join package_group pg + on pg.id = pv.package_id + and pg.exclude_from_search = false + --where (?packages = '{}' or package_id = ANY(?packages)) + /* + for now packages list serves as a list of parents and packages, + after adding new parents list need to uncomment line above and change condition below to use parents list + */ + where (?packages = '{}' or package_id like ANY( + select id from unnest(?packages::text[]) id + union + select id||'.%' from unnest(?packages::text[]) id)) + and (?versions = '{}' or version = ANY(?versions)) + group by package_id, version, pg.name + ), + versions as + ( + select pv.package_id, pv.version, pv.revision, pv.published_at, pv.status, maxrev.package_name + from published_version pv + inner join maxrev + on pv.package_id = maxrev.package_id + and pv.version = maxrev.version + and pv.revision = maxrev.revision + where pv.deleted_at is null + and (?statuses = '{}' or pv.status = ANY(?statuses)) + and pv.published_at >= ?start_date + and pv.published_at <= ?end_date + ), + operations as + ( + select o.*, v.status version_status, v.package_name, v.published_at version_published_at + from operation o + inner join versions v + on v.package_id = o.package_id + and v.version = o.version + and v.revision = o.revision + and (?api_type = '' or o.type = ?api_type) + and (?methods = '{}' or o.metadata->>'method' = ANY(?methods)) + ) + select + o.package_id, + o.package_name name, + o.version, + o.revision, + o.version_status, + o.operation_id, + o.title, + o.deprecated, + o.type as api_type, + o.metadata, + parent_package_names(o.package_id) parent_names, + case + when init_rank > 0 then init_rank + version_status_tf + operation_open_count + else 0 + end rank, + + --debug + coalesce(?scope_weight) scope_weight, + coalesce(?open_count_weight) open_count_weight, + scope_tf, + title_tf, + version_status_tf, + operation_open_count + from operations o + left join ( + select ts.data_hash, max(rank) as 
rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + case when scope_rank = 0 then detailed_scope_rank + when detailed_scope_rank = 0 then scope_rank + else scope_rank * detailed_scope_rank end rank + from + ts_rest_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_response then null else 0 end, ts_rank(scope_response, search_query)) resp_rank, + coalesce(case when ?filter_request then null else 0 end, ts_rank(scope_request, search_query)) req_rank, + coalesce(case when ?filter_examples then null else 0 end, ts_rank(scope_examples, search_query)) example_rank, + coalesce(case when ?filter_annotation then null else 0 end, ts_rank(scope_annotation, search_query)) annotation_rank, + coalesce(case when ?filter_properties then null else 0 end, ts_rank(scope_properties, search_query)) properties_rank, + coalesce(resp_rank + req_rank) scope_rank, + coalesce(example_rank + annotation_rank + properties_rank) detailed_scope_rank + where ts.data_hash = f.data_hash + and + ( + ( + (?filter_request = false and ?filter_response = false) or + (?filter_request and search_query @@ scope_request) or + (?filter_response and search_query @@ scope_response) + ) + and + ( + (?filter_annotation = false and ?filter_examples = false and ?filter_properties = false) or + (?filter_annotation and search_query @@ scope_annotation) or + (?filter_examples and search_query @@ scope_examples) or + (?filter_properties and search_query @@ scope_properties) + ) + ) + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) rest_ts + on rest_ts.data_hash = o.data_hash + and o.type = ?rest_api_type + and ?filter_all = false + left join ( + select ts.data_hash, max(rank) as rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + scope_rank rank + from + 
ts_graphql_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_annotation then null else 0 end, ts_rank(scope_annotation, search_query)) annotation_rank, + coalesce(case when ?filter_property then null else 0 end, ts_rank(scope_property, search_query)) property_rank, + coalesce(case when ?filter_argument then null else 0 end, ts_rank(scope_argument, search_query)) argument_rank, + coalesce(annotation_rank + property_rank + argument_rank) scope_rank + where ts.data_hash = f.data_hash + and + ( + (?filter_annotation = false and ?filter_property = false and ?filter_argument = false) or + (?filter_annotation and search_query @@ scope_annotation) or + (?filter_property and search_query @@ scope_property) or + (?filter_argument and search_query @@ scope_argument) + ) + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) graphql_ts + on graphql_ts.data_hash = o.data_hash + and o.type = ?graphql_api_type + and ?filter_all = false + left join ( + select ts.data_hash, max(rank) as rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + scope_rank rank + from + ts_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_all then null else 0 end, ts_rank(scope_all, search_query)) scope_rank + where ts.data_hash = f.data_hash + and search_query @@ scope_all + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) all_ts + on all_ts.data_hash = o.data_hash + and ?filter_all = true + left join operation_open_count oc + on oc.package_id = o.package_id + and oc.version = o.version + and oc.operation_id = o.operation_id, + coalesce(?title_weight * (o.title ilike ?text_filter)::int, 0) title_tf, + 
coalesce(?scope_weight * (coalesce(rest_ts.rank, 0) + coalesce(graphql_ts.rank, 0) + coalesce(all_ts.rank, 0)), 0) scope_tf, + coalesce(title_tf + scope_tf, 0) init_rank, + coalesce( + ?version_status_release_weight * (o.version_status = ?version_status_release)::int + + ?version_status_draft_weight * (o.version_status = ?version_status_draft)::int + + ?version_status_archived_weight * (o.version_status = ?version_status_archived)::int) version_status_tf, + coalesce(?open_count_weight * coalesce(oc.open_count), 0) operation_open_count + where init_rank > 0 + order by rank desc, o.version_published_at desc, o.operation_id + limit ?limit; + ` + _, err = o.cp.GetConnection().Model(searchQuery).Query(&result, operationsSearchQuery) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} + +func (o operationRepositoryImpl) SearchForOperations(searchQuery *entity.OperationSearchQuery) ([]entity.OperationSearchResult, error) { + _, err := o.cp.GetConnection().Exec("select to_tsquery(?)", searchQuery.SearchString) + if err != nil { + return nil, fmt.Errorf("invalid search string: %v", err.Error()) + } + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + var result []entity.OperationSearchResult + operationsSearchQuery := ` + with maxrev as + ( + select package_id, version, pg.name as package_name, max(revision) as revision + from published_version pv + inner join package_group pg + on pg.id = pv.package_id + and pg.exclude_from_search = false + --where (?packages = '{}' or package_id = ANY(?packages)) + /* + for now packages list serves as a list of parents and packages, + after adding new parents list need to uncomment line above and change condition below to use parents list + */ + where (?packages = '{}' or package_id like ANY( + select id from unnest(?packages::text[]) id + union + select id||'.%' from unnest(?packages::text[]) id)) + and (?versions = '{}' or version = 
ANY(?versions)) + group by package_id, version, pg.name + ), + versions as + ( + select pv.package_id, pv.version, pv.revision, pv.published_at, pv.status, maxrev.package_name + from published_version pv + inner join maxrev + on pv.package_id = maxrev.package_id + and pv.version = maxrev.version + and pv.revision = maxrev.revision + where pv.deleted_at is null + and (?statuses = '{}' or pv.status = ANY(?statuses)) + and pv.published_at >= ?start_date + and pv.published_at <= ?end_date + ), + operations as + ( + select o.*, v.status version_status, v.package_name, v.published_at version_published_at + from operation o + inner join versions v + on v.package_id = o.package_id + and v.version = o.version + and v.revision = o.revision + and (?api_type = '' or o.type = ?api_type) + and (?methods = '{}' or o.metadata->>'method' = ANY(?methods)) + and (?operation_types = '{}' or o.metadata->>'type' = ANY(?operation_types)) + ) + select + o.package_id, + o.package_name name, + o.version, + o.revision, + o.version_status status, + o.operation_id, + o.title, + o.data_hash, + o.deprecated, + o.kind, + o.type, + o.metadata, + parent_package_names(o.package_id) parent_names, + case + when init_rank > 0 then init_rank + version_status_tf + operation_open_count + else 0 + end rank, + + --debug + coalesce(?scope_weight) scope_weight, + coalesce(?open_count_weight) open_count_weight, + scope_tf, + title_tf, + version_status_tf, + operation_open_count + from operations o + left join ( + select ts.data_hash, max(rank) as rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + case when scope_rank = 0 then detailed_scope_rank + when detailed_scope_rank = 0 then scope_rank + else scope_rank * detailed_scope_rank end rank + from + ts_rest_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_response then null else 0 end, 
ts_rank(scope_response, search_query)) resp_rank, + coalesce(case when ?filter_request then null else 0 end, ts_rank(scope_request, search_query)) req_rank, + coalesce(case when ?filter_examples then null else 0 end, ts_rank(scope_examples, search_query)) example_rank, + coalesce(case when ?filter_annotation then null else 0 end, ts_rank(scope_annotation, search_query)) annotation_rank, + coalesce(case when ?filter_properties then null else 0 end, ts_rank(scope_properties, search_query)) properties_rank, + coalesce(resp_rank + req_rank) scope_rank, + coalesce(example_rank + annotation_rank + properties_rank) detailed_scope_rank + where ts.data_hash = f.data_hash + and + ( + ( + (?filter_request = false and ?filter_response = false) or + (?filter_request and search_query @@ scope_request) or + (?filter_response and search_query @@ scope_response) + ) + and + ( + (?filter_annotation = false and ?filter_examples = false and ?filter_properties = false) or + (?filter_annotation and search_query @@ scope_annotation) or + (?filter_examples and search_query @@ scope_examples) or + (?filter_properties and search_query @@ scope_properties) + ) + ) + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) rest_ts + on rest_ts.data_hash = o.data_hash + and o.type = ?rest_api_type + and ?filter_all = false + left join ( + select ts.data_hash, max(rank) as rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + scope_rank rank + from + ts_graphql_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_annotation then null else 0 end, ts_rank(scope_annotation, search_query)) annotation_rank, + coalesce(case when ?filter_property then null else 0 end, ts_rank(scope_property, search_query)) property_rank, + coalesce(case when ?filter_argument then null else 0 end, 
ts_rank(scope_argument, search_query)) argument_rank, + coalesce(annotation_rank + property_rank + argument_rank) scope_rank + where ts.data_hash = f.data_hash + and + ( + (?filter_annotation = false and ?filter_property = false and ?filter_argument = false) or + (?filter_annotation and search_query @@ scope_annotation) or + (?filter_property and search_query @@ scope_property) or + (?filter_argument and search_query @@ scope_argument) + ) + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) graphql_ts + on graphql_ts.data_hash = o.data_hash + and o.type = ?graphql_api_type + and ?filter_all = false + left join ( + select ts.data_hash, max(rank) as rank from ( + with filtered as (select data_hash from operations) + select + ts.data_hash, + scope_rank rank + from + ts_operation_data ts, + filtered f, + to_tsquery(?search_filter) search_query, + --using coalesce to skip ts_rank evaluation for scopes that are not requested + coalesce(case when ?filter_all then null else 0 end, ts_rank(scope_all, search_query)) scope_rank + where ts.data_hash = f.data_hash + and search_query @@ scope_all + ) ts + group by ts.data_hash + order by max(rank) desc + limit ?limit + offset ?offset + ) all_ts + on all_ts.data_hash = o.data_hash + and ?filter_all = true + left join operation_open_count oc + on oc.package_id = o.package_id + and oc.version = o.version + and oc.operation_id = o.operation_id, + coalesce(?title_weight * (o.title ilike ?text_filter)::int, 0) title_tf, + coalesce(?scope_weight * (coalesce(rest_ts.rank, 0) + coalesce(graphql_ts.rank, 0) + coalesce(all_ts.rank, 0)), 0) scope_tf, + coalesce(title_tf + scope_tf, 0) init_rank, + coalesce( + ?version_status_release_weight * (o.version_status = ?version_status_release)::int + + ?version_status_draft_weight * (o.version_status = ?version_status_draft)::int + + ?version_status_archived_weight * (o.version_status = ?version_status_archived)::int) version_status_tf, + 
coalesce(?open_count_weight * coalesce(oc.open_count), 0) operation_open_count + where init_rank > 0 + order by rank desc, o.version_published_at desc, o.operation_id + limit ?limit; + ` + _, err = o.cp.GetConnection().Model(searchQuery).Query(&result, operationsSearchQuery) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} + +func (o operationRepositoryImpl) GetOperationsTypeCount(packageId string, version string, revision int) ([]entity.OperationsTypeCountEntity, error) { + var result []entity.OperationsTypeCountEntity + operationsTypeCountQuery := ` + with versions as( + select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision + from published_version_reference s + inner join published_version pv + on pv.package_id = s.reference_id + and pv.version = s.reference_version + and pv.revision = s.reference_revision + and pv.deleted_at is null + where s.package_id = ? + and s.version = ? + and s.revision = ? + and s.excluded = false + union + select ? as package_id, ? as version, ? as revision + ), + depr_count as ( + select type, count(operation_id) cnt from operation o, versions v + where deprecated = true + and o.package_id = v.package_id + and o.version = v.version + and o.revision = v.revision + group by type + ), + op_count as ( + select type, count(operation_id) cnt from operation o, versions v + where o.package_id = v.package_id + and o.version = v.version + and o.revision = v.revision + group by type + ), + no_bwc_count as ( + select type, count(operation_id) cnt from operation o, versions v + where o.package_id = v.package_id + and o.version = v.version + and o.revision = v.revision + and o.kind = ? 
// GetOperationsTypeDataHashes returns, for every operation type present in
// the given package version revision, a JSON object mapping operation_id to
// its data_hash (built with json_object_agg, one row per type).
//
// Returns (nil, nil) when the query yields no rows.
func (o operationRepositoryImpl) GetOperationsTypeDataHashes(packageId string, version string, revision int) ([]entity.OperationsTypeDataHashEntity, error) {
	var result []entity.OperationsTypeDataHashEntity
	operationsTypeOperationHashesQuery := `
	select type, json_object_agg(operation_id, data_hash) operations_hash
	from operation
	where package_id = ?
	and version = ?
	and revision = ?
	group by type;
	`
	_, err := o.cp.GetConnection().Query(&result,
		operationsTypeOperationHashesQuery,
		packageId, version, revision,
	)
	if err != nil {
		if err == pg.ErrNoRows {
			// a revision without operations is not an error
			return nil, nil
		}
		return nil, err
	}

	return result, nil
}
+ and ((operation.deprecated_items is not null and jsonb_typeof(operation.deprecated_items) = 'array' and jsonb_array_length(operation.deprecated_items) != 0) + or operation.deprecated = true) + group by type + ) + + select dc.type as type, + coalesce(dc.cnt, 0) as deprecated_count, + coalesce(tg.tags, '{}') as tags + from depr_count dc + full outer join tagss tg + on dc.type = tg.type; + ` + _, err := o.cp.GetConnection().Query(&result, + deprecatedOperationsSummaryQuery, + packageId, version, revision, + packageId, version, revision) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} +func (o operationRepositoryImpl) GetDeprecatedOperationsRefsSummary(packageId string, version string, revision int) ([]entity.DeprecatedOperationsSummaryEntity, error) { + var result []entity.DeprecatedOperationsSummaryEntity + deprecatedOperationsSummaryQuery := ` + with refss as ( + select operation.type, operation.package_id, operation.version, operation.revision,operation.deprecated,operation.metadata,operation.operation_id, operation.deprecated_items from operation inner join + (with refs as( + select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision + from published_version_reference s + inner join published_version pv + on pv.package_id = s.reference_id + and pv.version = s.reference_version + and pv.revision = s.reference_revision + and pv.deleted_at is null + where s.package_id = ? + and s.version = ? + and s.revision = ? 
+ and s.excluded = false + ) + select package_id, version, revision + from refs + ) refs on operation.package_id = refs.package_id and operation.version = refs.version and operation.revision = refs.revision + ), + depr_count as ( + select type, count(operation_id) as cnt, package_id, version, revision from refss as r + where ((r.deprecated_items is not null and jsonb_typeof(r.deprecated_items) = 'array' and jsonb_array_length(r.deprecated_items) != 0) + or r.deprecated = true) + group by package_id, version, revision,type + ), + tagss as ( + select type, array_agg(distinct x.value) as tags, package_id, version,revision from refss + cross join lateral jsonb_array_elements_text(metadata->'tags') as x + where ((deprecated_items is not null and jsonb_typeof(deprecated_items) = 'array' and jsonb_array_length(deprecated_items) != 0) + or deprecated = true) + group by package_id, version, revision, type + ) + + select dc.type as type, dc.package_id as package_id, dc.version as version, dc.revision as revision, + coalesce(dc.cnt, 0) as deprecated_count, + coalesce(tg.tags, '{}') as tags + from depr_count dc + full outer join tagss tg + on dc.type = tg.type and dc.package_id = tg.package_id and dc.version = tg.version and dc.revision = tg.revision; + ` + _, err := o.cp.GetConnection().Query(&result, + deprecatedOperationsSummaryQuery, + packageId, version, revision) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} + +func (o operationRepositoryImpl) GetOperationGroup(packageId string, version string, revision int, apiType string, groupName string) (*entity.OperationGroupEntity, error) { + result := new(entity.OperationGroupEntity) + err := o.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("revision = ?", revision). + Where("api_type = ?", apiType). + Where("group_name = ?", groupName). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) GetOperationGroupTemplateFile(packageId string, version string, revision int, apiType string, groupName string) (*entity.OperationGroupTemplateFileEntity, error) { + result := new(entity.OperationGroupTemplateFileEntity) + err := o.cp.GetConnection().Model(result). + ColumnExpr("og.template_filename, operation_group_template.template"). + Join("inner join operation_group og"). + JoinOn("og.package_id = ?", packageId). + JoinOn("og.version = ?", version). + JoinOn("og.revision = ?", revision). + JoinOn("og.api_type = ?", apiType). + JoinOn("og.group_name = ?", groupName). + JoinOn("og.template_checksum = operation_group_template.checksum"). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (o operationRepositoryImpl) AddOperationGroupHistory(ent *entity.OperationGroupHistoryEntity) error { + _, err := o.cp.GetConnection().Model(ent).Insert() + if err != nil { + return err + } + return nil +} + +func (o operationRepositoryImpl) CreateOperationGroup(ent *entity.OperationGroupEntity, templateEntity *entity.OperationGroupTemplateEntity) error { + ctx := context.Background() + return o.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(ent).Insert() + if err != nil { + return err + } + return o.saveOperationGroupTemplate(tx, templateEntity) + }) +} + +func (o operationRepositoryImpl) DeleteOperationGroup(ent *entity.OperationGroupEntity) error { + ctx := context.Background() + return o.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(ent).WherePK().Delete() + if err != nil { + return err + } + return o.cleanupOperationGroupTemplate(tx, ent.TemplateChecksum) + }) +} + +func (o operationRepositoryImpl) ReplaceOperationGroup(oldGroupEntity 
// UpdateOperationGroup applies a group modification in a single transaction.
//
// The group row matched by the OLD group id is rewritten with the new name,
// id, description and template reference; the new template content (if any)
// is then stored, and the previously referenced template is removed once no
// group references it anymore. When newGroupedOperations is non-nil the
// group's operation list is fully replaced; nil leaves the list untouched.
func (o operationRepositoryImpl) UpdateOperationGroup(oldGroupEntity *entity.OperationGroupEntity, newGroupEntity *entity.OperationGroupEntity, newTemplateEntity *entity.OperationGroupTemplateEntity, newGroupedOperations *[]entity.GroupedOperationEntity) error {
	ctx := context.Background()
	return o.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		// update to operation_group.group_id also updates grouped_operation.group_id
		_, err := tx.Model(newGroupEntity).
			Where("group_id = ?", oldGroupEntity.GroupId).
			Set("group_name = ?group_name").
			Set("group_id = ?group_id").
			Set("description = ?description").
			Set("template_checksum = ?template_checksum").
			Set("template_filename = ?template_filename").
			Update()
		if err != nil {
			return err
		}
		err = o.saveOperationGroupTemplate(tx, newTemplateEntity)
		if err != nil {
			return err
		}
		err = o.cleanupOperationGroupTemplate(tx, oldGroupEntity.TemplateChecksum)
		if err != nil {
			return err
		}
		if newGroupedOperations == nil {
			// nil (as opposed to an empty slice) means the caller does not
			// want to change the grouped operations at all
			return nil
		}
		_, err = tx.Exec(`delete from grouped_operation where group_id = ?`, newGroupEntity.GroupId)
		if err != nil {
			return err
		}
		if len(*newGroupedOperations) > 0 {
			_, err = tx.Model(newGroupedOperations).Insert()
			if err != nil {
				return err
			}
		}
		return nil
	})
}
// GetVersionOperationGroups lists all operation groups of the given package
// version revision, each with its operation count (computed by a subquery on
// grouped_operation) and its export template filename.
//
// Returns (nil, nil) when the query yields no rows.
func (o operationRepositoryImpl) GetVersionOperationGroups(packageId string, version string, revision int) ([]entity.OperationGroupCountEntity, error) {
	var result []entity.OperationGroupCountEntity
	operationGroupCountQuery := `
	select og.package_id, og.version, og.revision, og.api_type, og.group_name, og.autogenerated, og.description,
	(select count(*) from grouped_operation where group_id = og.group_id) operations_count,
	og.template_filename export_template_filename
	from operation_group og
	where package_id = ?
	and version = ?
	and revision = ?`
	_, err := o.cp.GetConnection().Query(&result, operationGroupCountQuery, packageId, version, revision)
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}
+ and s.excluded = false + ) + select package_id, version, revision + from refs + union + select ? as package_id, ? as version, ? as revision + ) refs`, packageId, version, revision, packageId, version, revision) + query.JoinOn("operation.package_id = refs.package_id"). + JoinOn("operation.version = refs.version"). + JoinOn("operation.revision = refs.revision") + + if searchReq.RefPackageId != "" { + query.JoinOn("refs.package_id = ?", searchReq.RefPackageId) + } + if searchReq.OnlyAddable { + //todo try to replace this 'not in' condition with join + query.Where(`operation.operation_id not in ( + select operation_id from grouped_operation go + inner join operation_group og + on go.group_id = og.group_id + and og.package_id = ? + and og.version = ? + and og.revision = ? + and og.api_type = ? + and og.group_name = ? + where go.package_id = operation.package_id + and go.version = operation.version + and go.revision = operation.revision + )`, packageId, version, revision, operationType, groupName) + } else { + query.Join(`inner join operation_group og`). + JoinOn("og.package_id = ?", packageId). + JoinOn("og.version = ?", version). + JoinOn("og.revision = ?", revision). + JoinOn("og.api_type = ?", operationType). + JoinOn("og.group_name = ?", groupName). + Join("inner join grouped_operation go"). + JoinOn("go.group_id = og.group_id"). + JoinOn("go.package_id = operation.package_id"). + JoinOn("go.version = operation.version"). + JoinOn("go.revision = operation.revision"). + JoinOn("go.operation_id = operation.operation_id") + } + + query.Where("operation.type = ?", operationType) + + query.Order("operation.package_id", + "operation.version", + "operation.revision", + "operation_id ASC"). + Offset(searchReq.Limit * searchReq.Page). 
+ Limit(searchReq.Limit) + + if searchReq.TextFilter != "" { + searchReq.TextFilter = "%" + utils.LikeEscaped(searchReq.TextFilter) + "%" + query.WhereGroup(func(q *pg.Query) (*pg.Query, error) { + q = q.WhereOr("operation.title ilike ?", searchReq.TextFilter). + WhereOr("operation.metadata->>? ilike ?", "path", searchReq.TextFilter). + WhereOr("operation.metadata->>? ilike ?", "method", searchReq.TextFilter) + return q, nil + }) + } + + if searchReq.Kind != "" { + query.Where("kind = ?", searchReq.Kind) + } + if searchReq.ApiAudience != "" { + query.Where("api_audience = ?", searchReq.ApiAudience) + } + + if searchReq.Tag != "" { + searchReq.Tag = utils.LikeEscaped(searchReq.Tag) + query.Where(`exists( + select 1 from jsonb_array_elements(operation.metadata -> 'tags') a + where replace(a.value::text,'"','') like ?)`, searchReq.Tag) + } + + if searchReq.EmptyTag { + query.Where(`not exists(select 1 from jsonb_array_elements(operation.metadata -> 'tags') a + where a.value != '""') `) + } + + if searchReq.Deprecated != nil { + query.Where("operation.deprecated = ?", *searchReq.Deprecated) + } + + if searchReq.DocumentSlug != "" { + query.Join("inner join published_version_revision_content as pvrc"). + JoinOn("operation.operation_id = any(pvrc.operation_ids)"). + JoinOn("pvrc.slug = ?", searchReq.DocumentSlug). + JoinOn("operation.package_id = pvrc.package_id"). + JoinOn("operation.version = pvrc.version"). 
// GetOperationsByModelHash finds the operations of the given package version
// revision and API type that reference a model whose hash equals modelHash.
// The jsonb 'models' column is expanded with jsonb_each_text (key = model
// name — presumably; verify against the writer of this column, value = hash)
// and the matching keys are aggregated per operation.
//
// Returns (nil, nil) when the query yields no rows.
func (o operationRepositoryImpl) GetOperationsByModelHash(packageId string, version string, revision int, apiType string, modelHash string) ([]entity.OperationModelsEntity, error) {
	var result []entity.OperationModelsEntity
	operationsByModelHashQuery := `
	with operation_model as(
		select o.package_id, o.version, o.revision, o.operation_id, m.key::varchar as key, m.value::varchar as hash
		from operation o, jsonb_each_text(o.models) m
		where o.package_id = ?
		and o.version = ?
		and o.revision = ?
		and o.type = ?
	)
	select m.operation_id, array_agg(m.key)::varchar[] models
	from operation_model m
	where m.hash = ?
	group by m.operation_id
	order by m.operation_id;
	`
	_, err := o.cp.GetConnection().Query(&result, operationsByModelHashQuery, packageId, version, revision, apiType, modelHash)
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}
+ ` + _, err := o.cp.GetConnection().Query(&operationIds, operationsByPathAndMethod, packageId, version, revision, apiType, path, method) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + result := make([]string, 0) + + for _, t := range operationIds { + result = append(result, t.OperationId) + } + return result, nil +} diff --git a/qubership-apihub-service/repository/ProjectIntRepository.go b/qubership-apihub-service/repository/ProjectIntRepository.go new file mode 100644 index 0000000..93286ee --- /dev/null +++ b/qubership-apihub-service/repository/ProjectIntRepository.go @@ -0,0 +1,33 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package repository

import (
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
)

// PrjGrpIntRepository provides persistence for project integration entities
// (ProjectIntEntity), including soft-delete handling: Delete marks a project
// as deleted, GetDeletedEntity reads such projects, and CleanupDeleted
// removes them permanently.
type PrjGrpIntRepository interface {
	// Create persists a new project entity and returns it.
	Create(ent *entity.ProjectIntEntity) (*entity.ProjectIntEntity, error)
	// Update stores the modified project entity (matched by id) and returns it.
	Update(ent *entity.ProjectIntEntity) (*entity.ProjectIntEntity, error)
	// GetById returns the non-deleted project with the given id, or nil if absent.
	GetById(id string) (*entity.ProjectIntEntity, error)
	// GetByPackageId returns the non-deleted project linked to the package, or nil.
	GetByPackageId(packageId string) (*entity.ProjectIntEntity, error)
	// GetDeletedEntity returns a soft-deleted project by id, or nil.
	GetDeletedEntity(id string) (*entity.ProjectIntEntity, error)
	// GetProjectsForGroup lists non-deleted projects belonging to the group.
	GetProjectsForGroup(groupId string) ([]entity.ProjectIntEntity, error)
	// GetFilteredProjects lists non-deleted projects, optionally narrowed by a
	// name/id substring filter and/or a group id.
	GetFilteredProjects(filter string, groupId string) ([]entity.ProjectIntEntity, error)
	// Delete soft-deletes the project, recording the deleting user.
	Delete(id string, userId string) error
	// Exists reports whether a non-deleted project with the given id exists.
	Exists(id string) (bool, error)
	// CleanupDeleted permanently removes all soft-deleted projects.
	CleanupDeleted() error
	// GetProjectsForIntegration lists non-deleted projects matching the given
	// integration type, repository id and secret token.
	GetProjectsForIntegration(integrationType string, repositoryId string, secretToken string) ([]entity.ProjectIntEntity, error)
}
package repository

import (
	"context"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils"
	"github.com/go-pg/pg/v10"
)

// NewPrjGrpIntRepositoryPG creates a PostgreSQL-backed PrjGrpIntRepository
// on top of the given connection provider.
func NewPrjGrpIntRepositoryPG(cp db.ConnectionProvider) (PrjGrpIntRepository, error) {
	return &projectRepositoryImpl{cp: cp}, nil
}

// projectRepositoryImpl implements PrjGrpIntRepository via go-pg.
type projectRepositoryImpl struct {
	cp db.ConnectionProvider // source of database connections
}

// Create inserts the project entity and returns it.
func (p projectRepositoryImpl) Create(ent *entity.ProjectIntEntity) (*entity.ProjectIntEntity, error) {
	_, err := p.cp.GetConnection().Model(ent).Insert()
	if err != nil {
		return nil, err
	}
	return ent, nil
}

// Update rewrites the row matched by the entity id and returns the entity.
func (p projectRepositoryImpl) Update(ent *entity.ProjectIntEntity) (*entity.ProjectIntEntity, error) {
	_, err := p.cp.GetConnection().Model(ent).Where("id = ?", ent.Id).Update()
	if err != nil {
		return nil, err
	}
	return ent, nil
}

// GetById returns the non-deleted project with the given id, or (nil, nil)
// when no such row exists.
func (p projectRepositoryImpl) GetById(id string) (*entity.ProjectIntEntity, error) {
	result := new(entity.ProjectIntEntity)
	err := p.cp.GetConnection().Model(result).
		Where("id = ?", id).
		Where("deleted_at is ?", nil).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetByPackageId returns the non-deleted project linked to the given package,
// or (nil, nil) when no such row exists.
func (p projectRepositoryImpl) GetByPackageId(packageId string) (*entity.ProjectIntEntity, error) {
	result := new(entity.ProjectIntEntity)
	err := p.cp.GetConnection().Model(result).
		Where("package_id = ?", packageId).
		Where("deleted_at is ?", nil).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p projectRepositoryImpl) GetProjectsForGroup(groupId string) ([]entity.ProjectIntEntity, error) { + var result []entity.ProjectIntEntity + err := p.cp.GetConnection().Model(&result). + Where("group_id = ?", groupId). + Where("deleted_at is ?", nil). + Order("name ASC"). + Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (p projectRepositoryImpl) GetFilteredProjects(filter string, groupId string) ([]entity.ProjectIntEntity, error) { + var result []entity.ProjectIntEntity + query := p.cp.GetConnection().Model(&result). + Where("deleted_at is ?", nil). + Order("name ASC") + + if filter != "" { + filter = "%" + utils.LikeEscaped(filter) + "%" + query.WhereGroup(func(q *pg.Query) (*pg.Query, error) { + q = q.WhereOr("name ilike ?", filter).WhereOr("id ilike ?", filter) + return q, nil + }) + } + if groupId != "" { + query.Where("group_id = ?", groupId) + } + + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (p projectRepositoryImpl) Delete(id string, userId string) error { + ctx := context.Background() + + err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + ent := new(entity.ProjectIntEntity) + err := tx.Model(ent). + Where("id = ?", id). + Where("deleted_at is ?", nil). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return err + } + timeNow := time.Now() + ent.DeletedAt = &timeNow + ent.DeletedBy = userId + ent.PackageId = "" + + _, err = tx.Model(ent).Where("id = ?", ent.Id).Update() + return err + }) + + return err +} + +func (p projectRepositoryImpl) Exists(id string) (bool, error) { + group, err := p.GetById(id) + if err != nil { + return false, err + } + if group == nil { + return false, nil + } else { + return true, nil + } +} + +func (p projectRepositoryImpl) CleanupDeleted() error { + var ents []entity.ProjectIntEntity + _, err := p.cp.GetConnection().Model(&ents). + Where("deleted_at is not ?", nil). + Delete() + return err +} + +func (p projectRepositoryImpl) GetProjectsForIntegration(integrationType string, repositoryId string, secretToken string) ([]entity.ProjectIntEntity, error) { + var result []entity.ProjectIntEntity + query := p.cp.GetConnection().Model(&result). + Where("deleted_at is ?", nil). + Where("integration_type = ?", integrationType). + Where("repository_id = ?", repositoryId). + Where("secret_token = ?", secretToken). + Order("name ASC") + + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} diff --git a/qubership-apihub-service/repository/PublishedRepository.go b/qubership-apihub-service/repository/PublishedRepository.go new file mode 100644 index 0000000..013b687 --- /dev/null +++ b/qubership-apihub-service/repository/PublishedRepository.go @@ -0,0 +1,122 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PublishedRepository interface { + MarkVersionDeleted(packageId string, versionName string, userId string) error + PatchVersion(packageId string, versionName string, status *string, versionLabels *[]string) (*entity.PublishedVersionEntity, error) + GetVersion(packageId string, versionName string) (*entity.PublishedVersionEntity, error) + GetReadonlyVersion_deprecated(packageId string, versionName string) (*entity.ReadonlyPublishedVersionEntity_deprecated, error) + GetReadonlyVersion(packageId string, versionName string) (*entity.PackageVersionRevisionEntity, error) + GetVersionByRevision(packageId string, versionName string, revision int) (*entity.PublishedVersionEntity, error) + GetVersionIncludingDeleted(packageId string, versionName string) (*entity.PublishedVersionEntity, error) + IsPublished(packageId string, branchName string) (bool, error) + GetServiceOwner(workspaceId string, serviceName string) (string, error) + GetRichPackageVersion(packageId string, version string) (*entity.PackageVersionRichEntity, error) + GetRevisionContent(packageId string, versionName string, revision int) ([]entity.PublishedContentEntity, error) + GetRevisionContentWithLimit(packageId string, versionName string, revision int, skipRefs bool, searchQuery entity.PublishedContentSearchQueryEntity) ([]entity.PublishedContentEntity, error) + 
GetVersionRevisionsList_deprecated(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionRevisionEntity_deprecated, error) + GetVersionRevisionsList(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionRevisionEntity, error) + GetLatestContent(packageId string, versionName string, contentId string) (*entity.PublishedContentEntity, error) + GetLatestContentBySlug(packageId string, versionName string, slug string) (*entity.PublishedContentEntity, error) + GetRevisionContentBySlug(packageId string, versionName string, slug string, revision int) (*entity.PublishedContentEntity, error) + GetLatestContentByVersion(packageId string, versionName string) ([]entity.PublishedContentEntity, error) + + GetVersionSources(packageId string, versionName string, revision int) (*entity.PublishedSrcArchiveEntity, error) + GetPublishedVersionSourceDataConfig(packageId string, versionName string, revision int) (*entity.PublishedSrcDataConfigEntity, error) + GetPublishedSources(packageId string, versionName string, revision int) (*entity.PublishedSrcEntity, error) + + CreateVersionWithData(packageInfo view.PackageInfoFile, publishId string, version *entity.PublishedVersionEntity, content []*entity.PublishedContentEntity, + data []*entity.PublishedContentDataEntity, refs []*entity.PublishedReferenceEntity, src *entity.PublishedSrcEntity, srcArchive *entity.PublishedSrcArchiveEntity, + operations []*entity.OperationEntity, operationsData []*entity.OperationDataEntity, + operationComparisons []*entity.OperationComparisonEntity, builderNotifications []*entity.BuilderNotificationsEntity, + versionComparisonEntities []*entity.VersionComparisonEntity, serviceName string, pkg *entity.PackageEntity, versionComparisonsFromCache []string) error + GetContentData(packageId string, checksum string) (*entity.PublishedContentDataEntity, error) + + GetRevisionRefs(packageId string, versionName string, revision int) ([]entity.PublishedReferenceEntity, error) + 
GetVersionRefs(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionPublishedReference, error) //deprecated + GetVersionRefsV3(packageId string, version string, revision int) ([]entity.PublishedReferenceEntity, error) + GetVersionsByPreviousVersion(previousPackageId string, previousVersionName string) ([]entity.PublishedVersionEntity, error) + GetPackageVersions(packageId string, filter string) ([]entity.PublishedVersionEntity, error) + GetPackageVersionsWithLimit(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.PublishedVersionEntity, error) + GetReadonlyPackageVersionsWithLimit_deprecated(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.ReadonlyPublishedVersionEntity_deprecated, error) + GetReadonlyPackageVersionsWithLimit(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.PackageVersionRevisionEntity, error) + GetLastVersions(ids []string) ([]entity.PublishedVersionEntity, error) + GetLastVersion(id string) (*entity.PublishedVersionEntity, error) + GetDefaultVersion(packageId string, status string) (*entity.PublishedVersionEntity, error) + CleanupDeleted() error + DeleteDraftVersionsBeforeDate(packageId string, date time.Time, userId string) (int, error) + + GetFileSharedInfo(packageId string, fileId string, versionName string) (*entity.SharedUrlInfoEntity, error) + GetFileSharedInfoById(sharedId string) (*entity.SharedUrlInfoEntity, error) + CreateFileSharedInfo(newSharedIdInfo *entity.SharedUrlInfoEntity) error + + CreatePackage(packageEntity *entity.PackageEntity) error + CreatePrivatePackageForUser(packageEntity *entity.PackageEntity, userRoleEntity *entity.PackageMemberRoleEntity) error + GetPackage(id string) (*entity.PackageEntity, error) + GetPackageGroup(id string) (*entity.PackageEntity, error) + GetDeletedPackage(id string) (*entity.PackageEntity, error) + GetDeletedPackageGroup(id string) (*entity.PackageEntity, error) + 
GetPackageIncludingDeleted(id string) (*entity.PackageEntity, error) + GetPackagesForPackageGroup(id string) ([]entity.PackageEntity, error) + GetChildPackageGroups(parentId string, name string, onlyFavorite bool, userId string) ([]entity.PackageFavEntity, error) + GetAllChildPackageIdsIncludingParent(parentId string) ([]string, error) + GetAllPackageGroups(name string, onlyFavorite bool, userId string) ([]entity.PackageFavEntity, error) + GetParentPackageGroups(id string) ([]entity.PackageEntity, error) + GetParentsForPackage(id string) ([]entity.PackageEntity, error) + UpdatePackage(ent *entity.PackageEntity) (*entity.PackageEntity, error) + DeletePackage(id string, userId string) error + GetPackageGroupsByName(name string) ([]entity.PackageEntity, error) + GetFilteredPackages(filter string, parentId string) ([]entity.PackageEntity, error) + GetFilteredPackagesWithOffset(searchReq view.PackageListReq, userId string) ([]entity.PackageEntity, error) + GetPackageForServiceName(serviceName string) (*entity.PackageEntity, error) + GetVersionValidationChanges(packageId string, versionName string, revision int) (*entity.PublishedVersionValidationEntity, error) + GetVersionValidationProblems(packageId string, versionName string, revision int) (*entity.PublishedVersionValidationEntity, error) + SearchForVersions(searchQuery *entity.PackageSearchQuery) ([]entity.PackageSearchResult, error) + SearchForDocuments(searchQuery *entity.DocumentSearchQuery) ([]entity.DocumentSearchResult, error) + + RecalculatePackageOperationGroups(packageId string, restGroupingPrefixRegex string, graphqlGroupingPrefixRegex string, userId string) error + RecalculateOperationGroups(packageId string, version string, revision int, restGroupingPrefixRegex string, graphqlGroupingPrefixRegex string, userId string) error + + GetVersionComparison(comparisonId string) (*entity.VersionComparisonEntity, error) + GetVersionRefsComparisons(comparisonId string) ([]entity.VersionComparisonEntity, error) + 
SaveVersionChanges(packageInfo view.PackageInfoFile, publishId string, operationComparisons []*entity.OperationComparisonEntity, versionComparisons []*entity.VersionComparisonEntity, versionComparisonsFromCache []string) error + GetLatestRevision(packageId, version string) (int, error) + + SaveTransformedDocument(data *entity.TransformedContentDataEntity, publishId string) error + GetTransformedDocuments(packageId string, version string, apiType string, groupId string, buildType string, format string) (*entity.TransformedContentDataEntity, error) + DeleteTransformedDocuments(packageId string, version string, revision int, apiType string, groupId string) error + GetVersionRevisionContentForDocumentsTransformation(packageId string, version string, revision int, searchQuery entity.ContentForDocumentsTransformationSearchQueryEntity) ([]entity.PublishedContentWithDataEntity, error) + GetPublishedSourcesArchives(offset int) (*entity.PublishedSrcArchiveEntity, error) + DeletePublishedSourcesArchives(checksums []string) error + SavePublishedSourcesArchive(ent *entity.PublishedSrcArchiveEntity) error + GetPublishedVersionsHistory(filter view.PublishedVersionHistoryFilter) ([]entity.PackageVersionHistoryEntity, error) + + StoreOperationGroupPublishProcess(ent *entity.OperationGroupPublishEntity) error + UpdateOperationGroupPublishProcess(ent *entity.OperationGroupPublishEntity) error + GetOperationGroupPublishProcess(publishId string) (*entity.OperationGroupPublishEntity, error) + + StoreCSVDashboardPublishProcess(ent *entity.CSVDashboardPublishEntity) error + UpdateCSVDashboardPublishProcess(ent *entity.CSVDashboardPublishEntity) error + GetCSVDashboardPublishProcess(publishId string) (*entity.CSVDashboardPublishEntity, error) + GetCSVDashboardPublishReport(publishId string) (*entity.CSVDashboardPublishEntity, error) +} diff --git a/qubership-apihub-service/repository/PublishedRepositoryPG.go b/qubership-apihub-service/repository/PublishedRepositoryPG.go new file mode 
100644 index 0000000..b877526 --- /dev/null +++ b/qubership-apihub-service/repository/PublishedRepositoryPG.go @@ -0,0 +1,3863 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "context" + "fmt" + "net/http" + "sort" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + mEntity "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/go-pg/pg/v10" +) + +func NewPublishedRepositoryPG(cp db.ConnectionProvider) (PublishedRepository, error) { + return &publishedRepositoryImpl{cp: cp}, nil +} + +type publishedRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (p publishedRepositoryImpl) updateVersion(tx *pg.Tx, version *entity.PublishedVersionEntity) error { + _, err := tx.Model(version).WherePK().Update() + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) MarkVersionDeleted(packageId string, versionName string, userId 
string) error {
	ctx := context.Background()

	err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		// Load all live (not yet deleted) revisions of the version.
		var ents []entity.PublishedVersionEntity
		err := tx.Model(&ents).
			Where("package_id = ?", packageId).
			Where("version = ?", versionName).
			Where("deleted_at is ?", nil).
			Select()
		if err != nil {
			return err
		}

		// Stamp every revision with the same deletion time and actor.
		timeNow := time.Now()
		for _, ent := range ents {
			tmpEnt := &ent
			tmpEnt.DeletedAt = &timeNow
			tmpEnt.DeletedBy = userId
			_, err := tx.Model(tmpEnt).WherePK().Update()
			if err != nil {
				return err
			}
		}

		// The deleted version can no longer serve as the package's default released version.
		clearDefaultReleaseVersionForProjectQuery := `
	    UPDATE package_group
	    SET default_released_version = null
	    WHERE default_released_version = ? AND id = ?`
		_, err = tx.Exec(clearDefaultReleaseVersionForProjectQuery, versionName, packageId)
		if err != nil {
			return err
		}
		_, err = tx.Exec(`delete from grouped_operation where package_id = ? and version = ?`, packageId, versionName)
		if err != nil {
			return err
		}

		// Detach versions that pointed at the deleted one as their previous version.
		// An empty/null previous_version_package_id means "same package".
		clearPreviousVersionQuery := `
	    UPDATE published_version
	    SET previous_version = null, previous_version_package_id = null
	    WHERE previous_version = ? AND (previous_version_package_id = ? OR ((previous_version_package_id = '' or previous_version_package_id is null) and package_id = ?))`
		_, err = tx.Exec(clearPreviousVersionQuery, versionName, packageId, packageId)
		if err != nil {
			return err
		}

		return nil
	})

	return err
}

// PatchVersion updates the status and/or labels (nil pointer = leave unchanged)
// of the latest non-deleted revision of the given version.
// Returns (nil, nil) when the package does not exist; returns a zero-value
// entity when the version has no live revisions (preserving prior behavior).
func (p publishedRepositoryImpl) PatchVersion(packageId string, versionName string, status *string, versionLabels *[]string) (*entity.PublishedVersionEntity, error) {
	getPackage, errGetPackage := p.GetPackage(packageId)
	if errGetPackage != nil {
		return nil, errGetPackage
	}
	if getPackage == nil {
		return nil, nil
	}

	ent := new(entity.PublishedVersionEntity)

	// BUG FIX: the RunInTransaction result was previously discarded, so select/update
	// failures were silently swallowed and a half-populated entity was returned with
	// a nil error. The error is now propagated to the caller.
	err := p.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error {
		// BUG FIX: the read previously went through p.cp.GetConnection() rather than
		// the transaction; it now uses tx so the read and update share one transaction.
		err := tx.Model(ent).
			Where("package_id = ?", packageId).
			Where("version = ?", versionName).
			Where("deleted_at is ?", nil).
			Order("revision DESC").
			First()
		if err != nil {
			if err == pg.ErrNoRows {
				return nil // no live revision: keep legacy "empty entity, nil error" contract
			}
			return err
		}

		if status != nil {
			ent.Status = *status
		}
		if versionLabels != nil {
			ent.Labels = *versionLabels
		}

		_, err = tx.Model(ent).Where("package_id = ?", ent.PackageId).Where("version = ?", ent.Version).Where("revision = ?", ent.Revision).Update()
		if err != nil {
			return err
		}

		return nil
	})
	if err != nil {
		return nil, err
	}

	return ent, nil
}

// markAllVersionsDeletedByPackageId soft-deletes every live version of a package
// within the caller's transaction and clears previous-version links to them.
func (p publishedRepositoryImpl) markAllVersionsDeletedByPackageId(tx *pg.Tx, packageId string, userId string) error {
	var ents []entity.PublishedVersionEntity
	err := tx.Model(&ents).
		Where("package_id = ?", packageId).
		Where("deleted_at is ?", nil).
		Select()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil
		}
		return err
	}

	timeNow := time.Now()
	for _, ent := range ents {
		tmpEnt := &ent
		tmpEnt.DeletedAt = &timeNow
		tmpEnt.DeletedBy = userId
		err := p.updateVersion(tx, tmpEnt)
		if err != nil {
			return err
		}
		clearPreviousVersionQuery := `
	    UPDATE published_version
	    SET previous_version = null, previous_version_package_id = null
	    WHERE previous_version = ? AND (previous_version_package_id = ? 
OR ((previous_version_package_id = '' or previous_version_package_id is null) and package_id = ?))`
		_, err = tx.Exec(clearPreviousVersionQuery, ent.Version, packageId, packageId)
		if err != nil {
			return err
		}
	}
	_, err = tx.Exec(`delete from grouped_operation where package_id = ?`, packageId)
	if err != nil {
		return err
	}
	return nil
}

// GetVersion returns a single non-deleted published version of a package.
// versionName may carry an explicit revision ("name@rev"); without one the
// highest revision wins. Returns (nil, nil) when the package or version is absent.
func (p publishedRepositoryImpl) GetVersion(packageId string, versionName string) (*entity.PublishedVersionEntity, error) {
	pkg, pkgErr := p.GetPackage(packageId)
	if pkgErr != nil {
		return nil, pkgErr
	}
	if pkg == nil {
		return nil, nil
	}

	version, revision, err := SplitVersionRevision(versionName)
	if err != nil {
		return nil, err
	}

	versionEnt := new(entity.PublishedVersionEntity)
	query := p.cp.GetConnection().Model(versionEnt).
		Where("package_id = ?", packageId).
		Where("deleted_at is ?", nil).
		Where("version = ?", version)

	switch {
	case revision > 0:
		// Caller asked for one specific revision.
		query.Where("revision = ?", revision)
	case revision == 0:
		// No revision requested: pick the newest.
		query.Order("revision DESC")
	}

	if err := query.First(); err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}

	return versionEnt, nil
}

// GetLatestRevision returns the highest live revision number of a version
// (0 when the version does not exist, -1 on error).
func (p publishedRepositoryImpl) GetLatestRevision(packageId, versionName string) (int, error) {
	result := new(entity.PublishedVersionEntity)
	version, _, err := SplitVersionRevision(versionName)
	if err != nil {
		return -1, err
	}
	query := p.cp.GetConnection().Model(result).
		Where("package_id = ?", packageId).
		Where("deleted_at is ?", nil).
		Where("version = ?", version).
+ Order("revision DESC") + err = query.First() + if err != nil { + if err == pg.ErrNoRows { + return 0, nil + } + return -1, err + } + + return result.Revision, nil +} +func (p publishedRepositoryImpl) GetReadonlyVersion_deprecated(packageId string, versionName string) (*entity.ReadonlyPublishedVersionEntity_deprecated, error) { + getPackage, errGetPackage := p.GetPackage(packageId) + if errGetPackage != nil { + return nil, errGetPackage + } + if getPackage == nil { + return nil, nil + } + result := new(entity.ReadonlyPublishedVersionEntity_deprecated) + version, revision, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + query := ` + select pv.*,get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id),pv.previous_version) as previous_version_revision, coalesce(usr.name, created_by) user_name from published_version as pv left join user_data usr on usr.user_id = created_by + where pv.package_id = ? + and pv.version = ? + and ((? = 0 and pv.revision = get_latest_revision(?,?)) or + (? 
!= 0 and pv.revision = ?)) + and deleted_at is null + limit 1 + ` + _, err = p.cp.GetConnection().QueryOne(result, query, packageId, version, revision, packageId, version, revision, revision) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} +func (p publishedRepositoryImpl) GetReadonlyVersion(packageId string, versionName string) (*entity.PackageVersionRevisionEntity, error) { + getPackage, errGetPackage := p.GetPackage(packageId) + if errGetPackage != nil { + return nil, errGetPackage + } + if getPackage == nil { + return nil, nil + } + result := new(entity.PackageVersionRevisionEntity) + version, revision, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + query := ` + select pv.*,get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id),pv.previous_version) as previous_version_revision, + usr.name as prl_usr_name, usr.email as prl_usr_email, usr.avatar_url as prl_usr_avatar_url, + apikey.id as prl_apikey_id, apikey.name as prl_apikey_name, + case when coalesce(usr.name, apikey.name) is null then pv.created_by else usr.user_id end prl_usr_id + from published_version as pv + left join user_data usr on usr.user_id = pv.created_by + left join apihub_api_keys apikey on apikey.id = pv.created_by + where pv.package_id = ? + and pv.version = ? + and ((? = 0 and pv.revision = get_latest_revision(?,?)) or + (? 
!= 0 and pv.revision = ?)) + and pv.deleted_at is null + limit 1 + ` + _, err = p.cp.GetConnection().QueryOne(result, query, packageId, version, revision, packageId, version, revision, revision) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetRichPackageVersion(packageId string, version string) (*entity.PackageVersionRichEntity, error) { + result := new(entity.PackageVersionRichEntity) + version, revision, err := SplitVersionRevision(version) + if err != nil { + return nil, err + } + query := ` +select pv.*, pg.kind as kind, pg.name as package_name, pg.service_name as service_name, parent_package_names(pg.id) parent_names, get_latest_revision(pv.package_id, pv.version) != pv.revision as not_latest_revision +from package_group as pg, + published_version as pv +where pv.package_id = ? + and pv.version = ? + and ((? = 0 and pv.revision = get_latest_revision(pv.package_id, pv.version)) or + (? 
!= 0 and pv.revision = ?)) + and pv.package_id = pg.id +limit 1 +` + _, err = p.cp.GetConnection().QueryOne(result, query, packageId, version, revision, revision, revision) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetVersionRevisionsList_deprecated(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionRevisionEntity_deprecated, error) { + var ents []entity.PackageVersionRevisionEntity_deprecated + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + query := ` + select pv.*, us.email, us.name, us.avatar_url, coalesce(us.user_id, pv.created_by) as user_id, pv.revision != get_latest_revision(pv.package_id, pv.version) as not_latest_revision + from published_version as pv left join user_data as us on pv.created_by = us.user_id + where (?text_filter = '' + or exists(select 1 from unnest(pv.labels) as label where label ilike ?text_filter) + or pv.revision::text ilike ?text_filter + or exists(select user_id from user_data where user_id = pv.created_by and name ilike ?text_filter)) + and pv.package_id = ?package_id + and pv.version = ?version + and pv.deleted_at is null + order by pv.revision desc + limit ?limit + offset ?offset; + ` + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + return nil, err + } + return ents, nil +} +func (p publishedRepositoryImpl) GetVersionRevisionsList(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionRevisionEntity, error) { + var ents []entity.PackageVersionRevisionEntity + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + query := ` + select pv.*, pv.revision != get_latest_revision(pv.package_id, pv.version) as not_latest_revision, + us.user_id as prl_usr_id, us.name as prl_usr_name, us.email as 
prl_usr_email, us.avatar_url as prl_usr_avatar_url, + apikey.id as prl_apikey_id, apikey.name as prl_apikey_name, + case when coalesce(us.name, apikey.name) is null then pv.created_by else us.user_id end prl_usr_id + from published_version as pv + left join user_data as us on pv.created_by = us.user_id + left join apihub_api_keys as apikey on pv.created_by = apikey.id + where (?text_filter = '' + or exists(select 1 from unnest(pv.labels) as label where label ilike ?text_filter) + or exists(select from jsonb_each_text(pv.metadata) where value ilike ?text_filter) + or exists(select user_id from user_data where user_id = pv.created_by and name ilike ?text_filter)) + and pv.package_id = ?package_id + and pv.version = ?version + and pv.deleted_at is null + order by pv.revision desc + limit ?limit + offset ?offset; + ` + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + return nil, err + } + return ents, nil +} + +func (p publishedRepositoryImpl) GetVersionByRevision(packageId string, versionName string, revision int) (*entity.PublishedVersionEntity, error) { + result := new(entity.PublishedVersionEntity) + err := p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("version = ?", versionName). + Where("revision = ?", revision). + Where("deleted_at is ?", nil). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} + +func (p publishedRepositoryImpl) GetVersionIncludingDeleted(packageId string, versionName string) (*entity.PublishedVersionEntity, error) { + result := new(entity.PublishedVersionEntity) + version, revision, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + query := p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). 
+ Where("version = ?", version) + + if revision > 0 { + query.Where("revision = ?", revision) + } else if revision == 0 { + query.Order("revision DESC") + } + err = query.First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) IsPublished(packageId string, branchName string) (bool, error) { + count, err := p.cp.GetConnection().Model(&entity.PublishedVersionEntity{PackageId: packageId}). + Where("package_id = ?", packageId). + Where("jsonb_extract_path_text(metadata, ?) = ?", entity.BRANCH_NAME_KEY, branchName). + Where("deleted_at is ?", nil). + Count() + if err != nil { + if err == pg.ErrNoRows { + return false, nil + } + return false, err + } + return count > 0, nil +} + +func (p publishedRepositoryImpl) GetServiceOwner(workspaceId string, serviceName string) (string, error) { + var packageId string + serviceOwnerQuery := `SELECT package_id FROM package_service WHERE workspace_id = ? and service_name = ?` + _, err := p.cp.GetConnection().QueryOne(pg.Scan(&packageId), serviceOwnerQuery, workspaceId, serviceName) + if err != nil { + if err == pg.ErrNoRows { + return "", nil + } + return "", err + } + return packageId, nil +} + +func (p publishedRepositoryImpl) validateMigrationResult(tx *pg.Tx, packageInfo view.PackageInfoFile, publishId string, version *entity.PublishedVersionEntity, content []*entity.PublishedContentEntity, contentData []*entity.PublishedContentDataEntity, + refs []*entity.PublishedReferenceEntity, src *entity.PublishedSrcEntity, operations []*entity.OperationEntity, operationData []*entity.OperationDataEntity, versionComparisons []*entity.VersionComparisonEntity, operationComparisons []*entity.OperationComparisonEntity, versionComparisonsFromCache []string) error { + migrationRun := new(mEntity.MigrationRunEntity) + + err := tx.Model(migrationRun).Where("id = ?", packageInfo.MigrationId).First() + if err != nil { + return fmt.Errorf("failed to 
get migration info: %v", err.Error()) + } + if migrationRun.SkipValidation { + return nil + } + changes := make(map[string]interface{}) + changesOverview := make(PublishedBuildChangesOverview) + + currentTable := "published_version" + oldVersion := new(entity.PublishedVersionEntity) + err = tx.Model(oldVersion). + Where("package_id = ?", version.PackageId). + Where("version = ?", version.Version). + Where("revision = ?", version.Revision). + First() + if err != nil { + if err == pg.ErrNoRows { + changes[currentTable] = "published version not found" + changesOverview.setUnexpectedEntry(currentTable) + return fmt.Errorf("published version not found") + } else { + return err + } + } + if versionChanges := oldVersion.GetChanges(*version); len(versionChanges) > 0 { + changes[currentTable] = versionChanges + changesOverview.setTableChanges(currentTable, versionChanges) + } + + oldContent := make([]entity.PublishedContentEntity, 0) + err = tx.Model(&oldContent). + Where("package_id = ?", version.PackageId). + Where("version = ?", version.Version). + Where("revision = ?", version.Revision). 
+ Select() + if err != nil { + return err + } + + currentTable = "published_version_revision_content" + contentChanges := make(map[string]interface{}, 0) + matchedContent := make(map[string]struct{}, 0) + oldContentChecksums := make(map[string]struct{}, 0) + for _, s := range oldContent { + found := false + oldContentChecksums[s.Checksum] = struct{}{} + for _, t := range content { + if s.FileId == t.FileId { + found = true + matchedContent[s.FileId] = struct{}{} + if fileChanges := s.GetChanges(*t); len(fileChanges) > 0 { + contentChanges[s.FileId] = fileChanges + changesOverview.setTableChanges(currentTable, fileChanges) + continue + } + } + } + if !found { + return fmt.Errorf(`file '%v' not found in build archive`, s.FileId) + } + } + for _, t := range content { + if _, matched := matchedContent[t.FileId]; !matched { + return fmt.Errorf(`unexpected file '%v' (not found in database)`, t.FileId) + } + } + if len(contentChanges) > 0 { + changes[currentTable] = contentChanges + } + + currentTable = "published_data" + contentDataChanges := make(map[string]interface{}, 0) + matchedChecksums := make(map[string]struct{}, 0) + for oldChecksum := range oldContentChecksums { + found := false + for _, newContentData := range contentData { + if oldChecksum == newContentData.Checksum { + found = true + matchedChecksums[oldChecksum] = struct{}{} + } + } + if !found { + contentDataChanges[oldChecksum] = "content data not found in build archive" + changesOverview.setNotFoundEntry(currentTable) + } + } + for _, newContentData := range contentData { + if _, matched := matchedChecksums[newContentData.Checksum]; !matched { + contentDataChanges[newContentData.Checksum] = "unexpected content data (not found in database)" + changesOverview.setUnexpectedEntry(currentTable) + } + } + if len(contentDataChanges) > 0 { + changes[currentTable] = contentDataChanges + } + + currentTable = "published_version_reference" + oldRefs := make([]entity.PublishedReferenceEntity, 0) + err = 
tx.Model(&oldRefs). + Where("package_id = ?", version.PackageId). + Where("version = ?", version.Version). + Where("revision = ?", version.Revision). + Select() + if err != nil { + return err + } + refsChanges := make(map[string]interface{}, 0) + matchedRefs := make(map[string]struct{}, 0) + for _, s := range oldRefs { + found := false + refId := view.MakePackageRefKey(s.RefPackageId, s.RefVersion, s.RefRevision) + parentRefId := view.MakePackageRefKey(s.ParentRefPackageId, s.ParentRefVersion, s.ParentRefRevision) + refKey := fmt.Sprintf(`RefId:%v;ParentRef:%v`, refId, parentRefId) + for _, t := range refs { + if refId == view.MakePackageRefKey(t.RefPackageId, t.RefVersion, t.RefRevision) && + parentRefId == view.MakePackageRefKey(t.ParentRefPackageId, t.ParentRefVersion, t.ParentRefRevision) { + found = true + matchedRefs[refKey] = struct{}{} + if refChanges := s.GetChanges(*t); len(refChanges) > 0 { + refsChanges[refKey] = refChanges + changesOverview.setTableChanges(currentTable, refChanges) + continue + } + } + } + if !found { + return fmt.Errorf(`ref '%v' not found in build archive`, refKey) + } + } + for _, t := range refs { + refId := view.MakePackageRefKey(t.RefPackageId, t.RefVersion, t.RefRevision) + parentRefId := view.MakePackageRefKey(t.ParentRefPackageId, t.ParentRefVersion, t.ParentRefRevision) + refKey := fmt.Sprintf(`RefId:%v;ParentRef:%v`, refId, parentRefId) + if _, matched := matchedRefs[refKey]; !matched { + return fmt.Errorf(`unexpected ref '%v' (not found in database)`, refKey) + } + } + if len(refsChanges) > 0 { + changes[currentTable] = refsChanges + } + + currentTable = "published_sources" + oldSource := new(entity.PublishedSrcEntity) + sourcesFound := true + err = tx.Model(oldSource). + Where("package_id = ?", version.PackageId). + Where("version = ?", version.Version). + Where("revision = ?", version.Revision). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + changes[currentTable] = "sources not found" + changesOverview.setUnexpectedEntry(currentTable) + sourcesFound = false + } else { + return err + } + } + if sourcesFound { + if srcChanges := oldSource.GetChanges(*src); len(srcChanges) > 0 { + changes["published_sources"] = srcChanges + changesOverview.setTableChanges(currentTable, srcChanges) + } + } + + currentTable = "operation" + oldOperations := make([]entity.OperationEntity, 0) + err = tx.Model(&oldOperations). + Where("package_id = ?", version.PackageId). + Where("version = ?", version.Version). + Where("revision = ?", version.Revision). + Select() + if err != nil { + return err + } + operationsChanges := make(map[string]interface{}, 0) + matchedOperations := make(map[string]struct{}, 0) + for _, s := range oldOperations { + found := false + for _, t := range operations { + if s.OperationId == t.OperationId { + found = true + matchedOperations[s.OperationId] = struct{}{} + if operationChanges := s.GetChanges(*t); len(operationChanges) > 0 { + operationsChanges[s.OperationId] = operationChanges + changesOverview.setTableChanges(currentTable, operationChanges) + continue + } + } + } + if !found { + operationsChanges[s.OperationId] = "operation not found in build archive" + changesOverview.setNotFoundEntry(currentTable) + } + } + for _, t := range operations { + if _, matched := matchedOperations[t.OperationId]; !matched { + operationsChanges[t.OperationId] = "unexpected operation (not found in database)" + changesOverview.setUnexpectedEntry(currentTable) + } + } + if len(operationsChanges) > 0 { + changes["operation"] = operationsChanges + } + + currentTable = "operation_data" + oldOperationData := make([]entity.OperationDataEntity, 0) + err = tx.Model(&oldOperationData). + ColumnExpr("operation_data.data_hash, operation_data.search_scope"). + Join("inner join operation o"). + JoinOn("o.data_hash = operation_data.data_hash"). 
+ JoinOn("o.package_id = ?", version.PackageId). + JoinOn("o.version = ?", version.Version). + JoinOn("o.revision = ?", version.Revision). + Select() + if err != nil { + return err + } + operationDataChanges := make(map[string]interface{}, 0) + matchedOperationData := make(map[string]struct{}, 0) + for _, s := range oldOperationData { + found := false + for _, t := range operationData { + if s.DataHash == t.DataHash { + found = true + matchedOperationData[s.DataHash] = struct{}{} + if dataChanges := s.GetChanges(*t); len(dataChanges) > 0 { + operationDataChanges[s.DataHash] = dataChanges + changesOverview.setTableChanges(currentTable, dataChanges) + continue + } + } + } + if !found { + operationDataChanges[s.DataHash] = "operation data not found in build archive" + changesOverview.setNotFoundEntry(currentTable) + } + } + for _, t := range operationData { + if _, matched := matchedOperationData[t.DataHash]; !matched { + operationDataChanges[t.DataHash] = "unexpected operation data (not found in database)" + changesOverview.setUnexpectedEntry(currentTable) + } + } + if len(operationDataChanges) > 0 { + changes["operation_data"] = operationDataChanges + } + + if !packageInfo.NoChangelog && packageInfo.PreviousVersion != "" { + versionComparisonsChanges, versionComparisonIds, err := p.getVersionComparisonsChanges(tx, packageInfo, versionComparisons, versionComparisonsFromCache, &changesOverview) + if err != nil { + return err + } + if len(versionComparisonsChanges) > 0 { + changes["version_comparison"] = versionComparisonsChanges + } + operationComparisonsChanges, err := p.getOperationComparisonsChanges(tx, packageInfo, operationComparisons, versionComparisonIds, &changesOverview) + if err != nil { + return err + } + if len(operationComparisonsChanges) > 0 { + changes["operation_comparison"] = operationComparisonsChanges + } + } + if len(changes) > 0 { + ent := mEntity.MigratedVersionChangesEntity{ + PackageId: version.PackageId, + Version: version.Version, + Revision: 
version.Revision, + BuildId: publishId, + MigrationId: packageInfo.MigrationId, + Changes: changes, + UniqueChanges: changesOverview.getUniqueChanges(), + } + _, err = tx.Model(&ent).Insert() + if err != nil { + return fmt.Errorf("failed to insert migrated version changes: %v", err.Error()) + } + insertMigrationChangesQuery := ` + insert into migration_changes + values (?, ?) + on conflict (migration_id) + do update + set changes = coalesce(migration_changes.changes, '{}') || ( + SELECT jsonb_object_agg(key, coalesce((migration_changes.changes ->> key)::int, 0) + 1) + from jsonb_each_text(EXCLUDED.changes) + );` + _, err = tx.Exec(insertMigrationChangesQuery, packageInfo.MigrationId, changesOverview) + if err != nil { + return fmt.Errorf("failed to insert migration changes: %v", err.Error()) + } + } + return nil +} + +func (p publishedRepositoryImpl) getVersionComparisonsChanges(tx *pg.Tx, packageInfo view.PackageInfoFile, versionComparisonEntities []*entity.VersionComparisonEntity, versionComparisonsFromCache []string, changesOverview *PublishedBuildChangesOverview) (map[string]interface{}, []string, error) { + var err error + currentTable := "version_comparison" + if packageInfo.PreviousVersionPackageId == "" { + packageInfo.PreviousVersionPackageId = packageInfo.PackageId + } + if strings.Contains(packageInfo.Version, `@`) { + packageInfo.Version, packageInfo.Revision, err = SplitVersionRevision(packageInfo.Version) + if err != nil { + return nil, nil, err + } + } + if strings.Contains(packageInfo.PreviousVersion, `@`) { + packageInfo.PreviousVersion, packageInfo.PreviousVersionRevision, err = SplitVersionRevision(packageInfo.PreviousVersion) + if err != nil { + return nil, nil, err + } + } + if packageInfo.PreviousVersionRevision == 0 { + _, err = tx.QueryOne(pg.Scan(&packageInfo.PreviousVersionRevision), ` + select max(revision) from published_version + where package_id = ? 
+ and version = ?`, packageInfo.PreviousVersionPackageId, packageInfo.PreviousVersion) + if err != nil { + return nil, nil, fmt.Errorf("failed to calculate previous version revision: %v", err.Error()) + } + } + versionComparisonsChanges := make(map[string]interface{}, 0) + oldVersionComparisons := make([]entity.VersionComparisonEntity, 0) + versionComparisonSnapshotTable := fmt.Sprintf(`migration."version_comparison_%s"`, packageInfo.MigrationId) + getVersionComparisonsQuery := fmt.Sprintf(` + with ref_comparison_ids as ( + select unnest(refs) as comparison_id from %s + where package_id = ? + and version = ? + and revision = ? + and previous_package_id = ? + and previous_version = ? + and previous_revision = ? + ) + select * from %s + where package_id = ? + and version = ? + and revision = ? + and previous_package_id = ? + and previous_version = ? + and previous_revision = ? + union + select * from %s + where comparison_id in (select comparison_id from ref_comparison_ids) + `, versionComparisonSnapshotTable, versionComparisonSnapshotTable, versionComparisonSnapshotTable) + _, err = tx.Query(&oldVersionComparisons, getVersionComparisonsQuery, + packageInfo.PackageId, + packageInfo.Version, + packageInfo.Revision, + packageInfo.PreviousVersionPackageId, + packageInfo.PreviousVersion, + packageInfo.PreviousVersionRevision, + packageInfo.PackageId, + packageInfo.Version, + packageInfo.Revision, + packageInfo.PreviousVersionPackageId, + packageInfo.PreviousVersion, + packageInfo.PreviousVersionRevision, + ) + if err != nil { + return nil, nil, fmt.Errorf("failed to get version comparisons from db: %v", err.Error()) + } + matchedComparisons := make(map[string]struct{}, 0) + versionComparisonIds := make([]string, 0) + for _, s := range oldVersionComparisons { + found := false + for _, t := range versionComparisonEntities { + if s.ComparisonId == t.ComparisonId { + found = true + matchedComparisons[s.ComparisonId] = struct{}{} + versionComparisonIds = 
append(versionComparisonIds, s.ComparisonId) + if versionComparisonChanges := s.GetChanges(*t); len(versionComparisonChanges) > 0 { + versionComparisonsChanges[s.ComparisonId] = versionComparisonChanges + changesOverview.setTableChanges(currentTable, versionComparisonChanges) + } + } + } + if !found { + fromCache := false + for _, versionComparisonFromCache := range versionComparisonsFromCache { + if versionComparisonFromCache == s.ComparisonId { + fromCache = true + break + } + } + if !fromCache { + versionComparisonsChanges[s.ComparisonId] = "version comparison not found in build archive" + changesOverview.setNotFoundEntry(currentTable) + } + } + } + for _, t := range versionComparisonEntities { + if _, matched := matchedComparisons[t.ComparisonId]; !matched { + versionComparisonsChanges[t.ComparisonId] = "unexpected version comparison (not found in database)" + changesOverview.setNotFoundEntry(currentTable) + } + } + return versionComparisonsChanges, versionComparisonIds, nil +} + +func (p publishedRepositoryImpl) getOperationComparisonsChanges(tx *pg.Tx, packageInfo view.PackageInfoFile, operationComparisonEntities []*entity.OperationComparisonEntity, versionComparisonIds []string, changesOverview *PublishedBuildChangesOverview) (map[string]interface{}, error) { + var err error + currentTable := "operation_comparison" + if len(versionComparisonIds) == 0 && len(operationComparisonEntities) == 0 { + return nil, nil + } + if packageInfo.PreviousVersionPackageId == "" { + packageInfo.PreviousVersionPackageId = packageInfo.PackageId + } + if strings.Contains(packageInfo.Version, `@`) { + packageInfo.Version, packageInfo.Revision, err = SplitVersionRevision(packageInfo.Version) + if err != nil { + return nil, err + } + } + if strings.Contains(packageInfo.PreviousVersion, `@`) { + packageInfo.PreviousVersion, packageInfo.PreviousVersionRevision, err = SplitVersionRevision(packageInfo.PreviousVersion) + if err != nil { + return nil, err + } + } + if 
packageInfo.PreviousVersionRevision == 0 { + _, err = tx.QueryOne(pg.Scan(&packageInfo.PreviousVersionRevision), ` + select max(revision) from published_version + where package_id = ? + and version = ?`, packageInfo.PreviousVersionPackageId, packageInfo.PreviousVersion) + if err != nil { + return nil, fmt.Errorf("failed to calculate previous version revision: %v", err.Error()) + } + } + operationComparisonsChanges := make(map[string]interface{}, 0) + oldOperationComparisons := make([]entity.OperationComparisonEntity, 0) + matchedOperationComparisons := make(map[string]struct{}, 0) + if len(versionComparisonIds) > 0 { + operationComparisonSnapshotTable := fmt.Sprintf(`migration."operation_comparison_%s"`, packageInfo.MigrationId) + getOperationComparisonsQuery := fmt.Sprintf(` + select * from %s + where comparison_id in (?) + `, operationComparisonSnapshotTable) + _, err = tx.Query(&oldOperationComparisons, getOperationComparisonsQuery, + pg.In(versionComparisonIds), + ) + if err != nil { + return nil, fmt.Errorf("failed to get operation comparisons from db: %v", err.Error()) + } + for _, s := range oldOperationComparisons { + key := fmt.Sprintf(`ComparisonId:%s;OperationId:%s`, s.ComparisonId, s.OperationId) + found := false + for _, t := range operationComparisonEntities { + if s.ComparisonId == t.ComparisonId && + s.OperationId == t.OperationId { + found = true + matchedOperationComparisons[key] = struct{}{} + if operationComparisonChanges := s.GetChanges(*t); len(operationComparisonChanges) > 0 { + operationComparisonsChanges[key] = operationComparisonChanges + changesOverview.setTableChanges(currentTable, operationComparisonChanges) + } + } + } + if !found { + operationComparisonsChanges[key] = "operation comparison not found in build archive" + changesOverview.setNotFoundEntry(currentTable) + } + } + } + for _, t := range operationComparisonEntities { + key := fmt.Sprintf(`ComparisonId:%s;OperationId:%s`, t.ComparisonId, t.OperationId) + if _, matched := 
matchedOperationComparisons[key]; !matched { + operationComparisonsChanges[key] = "unexpected operation comparison (not found in database)" + changesOverview.setUnexpectedEntry(currentTable) + } + } + return operationComparisonsChanges, nil +} + +func (p publishedRepositoryImpl) CreateVersionWithData(packageInfo view.PackageInfoFile, buildId string, version *entity.PublishedVersionEntity, content []*entity.PublishedContentEntity, + data []*entity.PublishedContentDataEntity, refs []*entity.PublishedReferenceEntity, src *entity.PublishedSrcEntity, srcArchive *entity.PublishedSrcArchiveEntity, + operations []*entity.OperationEntity, operationsData []*entity.OperationDataEntity, + operationComparisons []*entity.OperationComparisonEntity, builderNotifications []*entity.BuilderNotificationsEntity, + versionComparisons []*entity.VersionComparisonEntity, serviceName string, pkg *entity.PackageEntity, versionComparisonsFromCache []string) error { + if len(content) == 0 && len(refs) == 0 { + return nil + } + + var err error + ctx := context.Background() + err = p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + start := time.Now() + var ents []entity.BuildEntity + _, err := tx.Query(&ents, getBuildWithLock, buildId) + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: getBuildWithLock") + if err != nil { + return fmt.Errorf("CreateVersionWithData: failed to get build %s: %w", buildId, err) + } + if len(ents) == 0 { + return fmt.Errorf("CreateVersionWithData: failed to start version publish. Build with buildId='%s' is not found", buildId) + } + build := &ents[0] + + //do not allow publish for "complete" builds and builds that are not failed with "Restart count exceeded limit" + if build.Status == string(view.StatusComplete) || + (build.Status == string(view.StatusError) && build.RestartCount < 2) { + return fmt.Errorf("failed to start version publish. 
Version with buildId='%v' is already published or failed", buildId) + } + + start = time.Now() + _, err = tx.Model(version).OnConflict("(package_id, version, revision) DO UPDATE").Insert() + if err != nil { + return fmt.Errorf("failed to insert published_version %+v: %w", version, err) + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: insert version") + + if packageInfo.MigrationBuild { + start = time.Now() + err := p.validateMigrationResult(tx, packageInfo, buildId, version, content, data, refs, src, operations, operationsData, versionComparisons, operationComparisons, versionComparisonsFromCache) + if err != nil { + return fmt.Errorf("migration result validation failed: %v", err.Error()) + } + // ok, it takes pretty long time, but valuable + utils.PerfLog(time.Since(start).Milliseconds(), 2000, "CreateVersionWithData: migration validation") + } + + start = time.Now() + for _, d := range data { + exists, err := p.contentDataExists(tx, d.PackageId, d.Checksum) // TODO: could be bulk select + if err != nil { + return err + } + if !exists { + _, err := tx.Model(d).OnConflict("(package_id, checksum) DO UPDATE").Insert() + if err != nil { + return fmt.Errorf("failed to insert published_data %+v: %w", d, err) + } + } + } + utils.PerfLog(time.Since(start).Milliseconds(), 200, "CreateVersionWithData: content data insert") + start = time.Now() + for _, c := range content { + _, err := tx.Model(c).OnConflict("(package_id, version, revision, file_id) DO UPDATE").Insert() + if err != nil { + return fmt.Errorf("failed to insert published_version_revision_content %+v: %w", c, err) + } + } + utils.PerfLog(time.Since(start).Milliseconds(), 200, "CreateVersionWithData: content insert") + + if len(refs) > 0 { + start = time.Now() + _, err := tx.Model(&refs).OnConflict(`(package_id, version, revision, reference_id, reference_version, reference_revision, parent_reference_id, parent_reference_version, parent_reference_revision) + DO UPDATE SET 
"excluded" = EXCLUDED."excluded"`).Insert()
+ if err != nil {
+ return fmt.Errorf("failed to insert published_version_reference %+v: %w", refs, err)
+ }
+ utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: refs insert")
+ }
+ if srcArchive != nil {
+ start = time.Now()
+ count, err := tx.Model(srcArchive).
+ Where("checksum = ?", srcArchive.Checksum).
+ Count()
+ if err != nil {
+ return err
+ }
+ if count == 0 {
+ _, err := tx.Model(srcArchive).OnConflict("(checksum) DO NOTHING").Insert()
+ if err != nil {
+ return fmt.Errorf("failed to insert published_sources_archive %+v: %w", srcArchive, err)
+ }
+ }
+ utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: srcArchive insert")
+ }
+ if src != nil {
+ start = time.Now()
+ _, err := tx.Model(src).OnConflict("(package_id, version, revision) DO UPDATE").Insert()
+ if err != nil {
+ return fmt.Errorf("failed to insert published_sources %+v: %w", src, err)
+ }
+ utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: src insert")
+ }
+ validationSkipped := true
+ if packageInfo.MigrationBuild {
+ migrationRun := new(mEntity.MigrationRunEntity)
+ err := tx.Model(migrationRun).Where("id = ?", packageInfo.MigrationId).First()
+ if err != nil {
+ return fmt.Errorf("failed to get migration info: %v", err.Error())
+ }
+ validationSkipped = migrationRun.SkipValidation
+ }
+ newOperationsData := make([]entity.OperationDataEntity, 0)
+ if len(operationsData) > 0 {
+ start = time.Now()
+ searchScopeChangesCountQuery := ` // renamed from misspelled "seachScopeChangesCountQuery"
+ select count(*)
+ from migrated_version_changes
+ where build_id = ?
+ and (changes -> 'operation_data' -> ? -> 'SearchScope' is not null) limit 1;`
+ oldOperationDataCountQuery := `
+ select count(data_hash)
+ from operation_data
+ where data_hash = ? 
limit 1`
+ for _, data := range operationsData {
+ var count int
+ _, err = tx.Query(pg.Scan(&count), oldOperationDataCountQuery, data.DataHash)
+ if err != nil {
+ return err
+ }
+ if count != 1 {
+ newOperationsData = append(newOperationsData, *data)
+ continue
+ }
+ if validationSkipped {
+ oldOperationData := new(entity.OperationDataEntity)
+ err = tx.Model(oldOperationData).Column("search_scope").Where("data_hash = ?", data.DataHash).First()
+ if err != nil {
+ if err == pg.ErrNoRows {
+ newOperationsData = append(newOperationsData, *data)
+ continue
+ }
+ return err
+ }
+ if len(oldOperationData.GetChanges(*data)) > 0 {
+ newOperationsData = append(newOperationsData, *data)
+ }
+ } else {
+ var count int
+ _, err = tx.Query(pg.Scan(&count), searchScopeChangesCountQuery, buildId, data.DataHash)
+ if err != nil {
+ return err
+ }
+ if count > 0 {
+ newOperationsData = append(newOperationsData, *data)
+ continue
+ }
+ }
+ }
+ utils.PerfLog(time.Since(start).Milliseconds(), 100+int64(len(operationsData)*10), fmt.Sprintf("CreateVersionWithData: operationsData calculation (%d items)", len(operationsData)))
+ }
+ if len(newOperationsData) > 0 {
+ start = time.Now()
+ _, err := tx.Model(&newOperationsData).OnConflict("(data_hash) DO UPDATE SET search_scope = EXCLUDED.search_scope").Insert()
+ if err != nil {
+ return fmt.Errorf("failed to insert operation_data: %w", err)
+ }
+ utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: operationsData insert")
+ }
+ if packageInfo.MigrationBuild {
+ // In case of migration list of operations may change due to new builder implementation, so need to cleanup existing list before insert
+ start = time.Now()
+ _, err := tx.Model(&entity.OperationEntity{}).
+ Where("package_id=?", version.PackageId).
+ Where("version=?", version.Version).
+ Where("revision=?", version.Revision). 
+ Delete() + utils.PerfLog(time.Since(start).Milliseconds(), 50+int64(len(operations)*10), "CreateVersionWithData: old operations delete") + if err != nil { + return fmt.Errorf("failed to cleanup operations for migration %+v: %w", operations, err) + } + } + if len(operations) != 0 { + start = time.Now() + _, err := tx.Model(&operations).OnConflict("(package_id, version, revision, operation_id) DO UPDATE").Insert() + utils.PerfLog(time.Since(start).Milliseconds(), 50+int64(len(operations)*10), "CreateVersionWithData: new operations insert") + if err != nil { + return fmt.Errorf("failed to insert operations %+v: %w", operations, err) + } + } + if len(newOperationsData) > 0 { + if packageInfo.MigrationBuild { + //insert versions that require text search recalculation into specific table. These versions will be recalculated at the end of migration + _, err = tx.Exec( + fmt.Sprintf(`insert into migration."expired_ts_operation_data_%s" values(?, ?, ?)`, packageInfo.MigrationId), + version.PackageId, version.Version, version.Revision) + if err != nil { + return fmt.Errorf("failed to insert into migration.expired_ts_operation_data: %w", err) + } + } else { + start = time.Now() + calculateRestTextSearchDataQuery := ` + insert into ts_rest_operation_data + select data_hash, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_request, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_response, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_annotation, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_properties, + to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_examples + from operation_data + where data_hash in (select distinct data_hash from operation where package_id = ? and version = ? and revision = ? and type = ?) 
+ on conflict (data_hash) do update
+ set scope_request = EXCLUDED.scope_request,
+ scope_response = EXCLUDED.scope_response,
+ scope_annotation = EXCLUDED.scope_annotation,
+ scope_properties = EXCLUDED.scope_properties,
+ scope_examples = EXCLUDED.scope_examples;`
+ _, err = tx.Exec(calculateRestTextSearchDataQuery,
+ view.RestScopeRequest, view.RestScopeResponse, view.RestScopeAnnotation, view.RestScopeProperties, view.RestScopeExamples,
+ version.PackageId, version.Version, version.Revision, view.RestApiType)
+ if err != nil {
+ return fmt.Errorf("failed to insert ts_rest_operation_data: %w", err)
+ }
+ calculateGraphqlTextSearchDataQuery := `
+ insert into ts_graphql_operation_data
+ select data_hash,
+ to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_argument,
+ to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_property,
+ to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_annotation
+ from operation_data
+ where data_hash in (select distinct data_hash from operation where package_id = ? and version = ? and revision = ? and type = ?)
+ on conflict (data_hash) do update
+ set scope_argument = EXCLUDED.scope_argument,
+ scope_property = EXCLUDED.scope_property,
+ scope_annotation = EXCLUDED.scope_annotation;`
+ _, err = tx.Exec(calculateGraphqlTextSearchDataQuery,
+ view.GraphqlScopeArgument, view.GraphqlScopeProperty, view.GraphqlScopeAnnotation,
+ version.PackageId, version.Version, version.Revision, view.GraphqlApiType)
+ if err != nil {
+ return fmt.Errorf("failed to insert ts_graphql_operation_data: %w", err) // fixed table-name typo ("grahpql") so the message matches the actual table
+ }
+ calculateAllTextSearchDataQuery := `
+ insert into ts_operation_data
+ select data_hash,
+ to_tsvector(jsonb_extract_path_text(search_scope, ?)) scope_all
+ from operation_data
+ where data_hash in (select distinct data_hash from operation where package_id = ? and version = ? and revision = ?) 
+ on conflict (data_hash) do update + set scope_all = EXCLUDED.scope_all` + _, err = tx.Exec(calculateAllTextSearchDataQuery, + view.ScopeAll, + version.PackageId, version.Version, version.Revision) + if err != nil { + return fmt.Errorf("failed to insert ts_operation_data: %w", err) + } + utils.PerfLog(time.Since(start).Milliseconds(), 1000, "CreateVersionWithData: ts_vectors insert") + } + } + if len(versionComparisons) != 0 { + start = time.Now() + err = p.saveVersionChangesTx(tx, operationComparisons, versionComparisons) + if err != nil { + return err + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: versionComparisons insert") + } + if len(builderNotifications) != 0 { + start = time.Now() + _, err := tx.Model(&builderNotifications).Insert() + if err != nil { + return fmt.Errorf("failed to insert builder notifications %+v: %w", builderNotifications, err) + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: builderNotifications insert") + } + if !packageInfo.MigrationBuild { + start = time.Now() + err = p.propagatePreviousOperationGroups(tx, version) + if err != nil { + return fmt.Errorf("failed to propagate previous operation groups: %w", err) + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: propagatePreviousOperationGroups") + } + + if serviceName != "" { + start = time.Now() + log.Infof("setting serviceName '%s' for package %s", serviceName, version.PackageId) + _, err := tx.Model(pkg).Where("id = ?", version.PackageId).Set("service_name = ?", serviceName).Update() + if err != nil { + return err + } + insertServiceOwnerQuery := ` + INSERT INTO package_service (workspace_id, package_id, service_name) + VALUES (?, ?, ?)` + _, err = tx.Exec(insertServiceOwnerQuery, utils.GetPackageWorkspaceId(version.PackageId), version.PackageId, serviceName) + if err != nil { + return err + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: set serviceName 
for package") + } + + start = time.Now() + var ent entity.BuildEntity + query := tx.Model(&ent). + Where("build_id = ?", buildId). + Set("status = ?", view.StatusComplete). + Set("details = ?", ""). + Set("last_active = now()") + _, err = query.Update() + if err != nil { + return fmt.Errorf("failed to update build entity: %w", err) + } + utils.PerfLog(time.Since(start).Milliseconds(), 50, "CreateVersionWithData: update build entity") + + return nil + }) + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) propagatePreviousOperationGroups(tx *pg.Tx, version *entity.PublishedVersionEntity) error { + previousGroupPackageId := version.PackageId + previousGroupVersion := version.Version + previousGroupRevision := version.Revision - 1 + if version.Revision <= 1 { + if version.PreviousVersion == "" { + return nil + } + if version.PreviousVersionPackageId != "" { + previousGroupPackageId = version.PreviousVersionPackageId + } + previousGroupVersion = version.PreviousVersion + _, err := tx.QueryOne(pg.Scan(&previousGroupRevision), ` + select max(revision) from published_version + where package_id = ? + and version = ?`, previousGroupPackageId, previousGroupVersion) + if err != nil { + return err + } + } + previousOperationGroups := make([]entity.OperationGroupEntity, 0) + getOperationGroupsQuery := `select * from operation_group where package_id = ? and version = ? and revision = ? and autogenerated = false` + _, err := tx.Query(&previousOperationGroups, getOperationGroupsQuery, previousGroupPackageId, previousGroupVersion, previousGroupRevision) + if err != nil { + if err == pg.ErrNoRows { + return nil + } + return err + } + if len(previousOperationGroups) == 0 { + return nil + } + copyExistingOperationsFromPackageQuery := ` + insert into grouped_operation + select ?, o.package_id, o.version, o.revision, o.operation_id + from grouped_operation g + inner join operation o + on o.package_id = ? + and o.version = ? + and o.revision = ? 
+ and o.operation_id = g.operation_id + where g.group_id = ?; + ` + //this query detects if operation moved to another ref and updates the link in grouped_operation table instead of marking it as deleted + copyExistingOperationsFromRefsQuery := ` + insert into grouped_operation + with refs as ( + select distinct reference_id as package_id, reference_version as version, reference_revision as revision from published_version_reference + where package_id = ? + and version = ? + and revision = ? + and excluded = false + ), + operations as ( + select o.package_id, o.version, o.revision, o.operation_id from operation o + inner join refs r + on r.package_id = o.package_id + and r.version = o.version + and r.revision = o.revision + ) + select ?, o.package_id, o.version, o.revision, o.operation_id from grouped_operation g + inner join operations o + on g.package_id = o.package_id + and g.operation_id = o.operation_id + where g.group_id = ?; + ` + + for _, group := range previousOperationGroups { + oldGroupId := group.GroupId + newGroup := group + newGroup.PackageId = version.PackageId + newGroup.Version = version.Version + newGroup.Revision = version.Revision + newGroup.GroupId = view.MakeOperationGroupId(newGroup.PackageId, newGroup.Version, newGroup.Revision, newGroup.ApiType, newGroup.GroupName) + _, err = tx.Model(&newGroup).Insert() + if err != nil { + return fmt.Errorf("failed to copy old operation group: %w", err) + } + _, err = tx.Model(&entity.OperationGroupHistoryEntity{ + GroupId: newGroup.GroupId, + Action: view.OperationGroupActionCreate, + Data: newGroup, + UserId: version.CreatedBy, + Date: time.Now(), + Automatic: true, + }).Insert() + if err != nil { + return fmt.Errorf("failed to insert operation group history: %w", err) + } + _, err = tx.Exec(copyExistingOperationsFromPackageQuery, newGroup.GroupId, newGroup.PackageId, newGroup.Version, newGroup.Revision, oldGroupId) + if err != nil { + return fmt.Errorf("failed to copy existing grouped operations for 
package: %w", err) + } + _, err = tx.Exec(copyExistingOperationsFromRefsQuery, + version.PackageId, version.Version, version.Revision, + newGroup.GroupId, oldGroupId) + if err != nil { + return fmt.Errorf("failed to copy existing grouped operations for refs: %w", err) + } + } + + return err +} + +func (p publishedRepositoryImpl) validateChangelogMigrationResult(tx *pg.Tx, packageInfo view.PackageInfoFile, publishId string, versionComparisons []*entity.VersionComparisonEntity, operationComparisons []*entity.OperationComparisonEntity, versionComparisonsFromCache []string) error { + migrationRun := new(mEntity.MigrationRunEntity) + err := tx.Model(migrationRun).Where("id = ?", packageInfo.MigrationId).First() + if err != nil { + return fmt.Errorf("failed to get migration info: %v", err.Error()) + } + if migrationRun.SkipValidation { + return nil + } + if packageInfo.PreviousVersion == "" { + return nil + } + changes := make(map[string]interface{}, 0) + changesOverview := make(PublishedBuildChangesOverview) + versionComparisonsChanges, versionComparisonIds, err := p.getVersionComparisonsChanges(tx, packageInfo, versionComparisons, versionComparisonsFromCache, &changesOverview) + if err != nil { + return err + } + if len(versionComparisonsChanges) > 0 { + changes["version_comparison"] = versionComparisonsChanges + } + operationComparisonsChanges, err := p.getOperationComparisonsChanges(tx, packageInfo, operationComparisons, versionComparisonIds, &changesOverview) + if err != nil { + return err + } + if len(operationComparisonsChanges) > 0 { + changes["operation_comparison"] = operationComparisonsChanges + } + if len(changes) > 0 { + ent := mEntity.MigratedVersionChangesEntity{ + PackageId: packageInfo.PackageId, + Version: packageInfo.Version, + Revision: packageInfo.Revision, + BuildId: publishId, + MigrationId: packageInfo.MigrationId, + Changes: changes, + UniqueChanges: changesOverview.getUniqueChanges(), + } + _, err = tx.Model(&ent).Insert() + if err != nil { + 
return fmt.Errorf("failed to insert migrated version changes: %v", err.Error()) + } + insertMigrationChangesQuery := ` + insert into migration_changes + values (?, ?) + on conflict (migration_id) + do update + set changes = coalesce(migration_changes.changes, '{}') || ( + SELECT jsonb_object_agg(key, coalesce((migration_changes.changes ->> key)::int, 0) + 1) + from jsonb_each_text(EXCLUDED.changes) + );` + _, err = tx.Exec(insertMigrationChangesQuery, packageInfo.MigrationId, changesOverview) + if err != nil { + return fmt.Errorf("failed to insert migration changes: %v", err.Error()) + } + } + return nil +} + +func (p publishedRepositoryImpl) SaveVersionChanges(packageInfo view.PackageInfoFile, publishId string, operationComparisons []*entity.OperationComparisonEntity, versionComparisons []*entity.VersionComparisonEntity, versionComparisonsFromCache []string) error { + ctx := context.Background() + return p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + var ents []entity.BuildEntity + _, err := tx.Query(&ents, getBuildWithLock, publishId) + if err != nil { + return fmt.Errorf("CreateVersionWithData: failed to get build %s: %w", publishId, err) + } + if len(ents) == 0 { + return fmt.Errorf("SaveVersionChanges: failed to start version publish. Build with buildId='%s' is not found", publishId) + } + build := &ents[0] + + //do not allow publish for "complete" builds and builds that are not failed with "Restart count exceeded limit" + if build.Status == string(view.StatusComplete) || + (build.Status == string(view.StatusError) && build.RestartCount < 2) { + return fmt.Errorf("failed to start version publish. 
Version with buildId='%v' is already published or failed", publishId) + } + if packageInfo.MigrationBuild && !packageInfo.NoChangelog { + start := time.Now() + err := p.validateChangelogMigrationResult(tx, packageInfo, publishId, versionComparisons, operationComparisons, versionComparisonsFromCache) + if err != nil { + return err + } + utils.PerfLog(time.Since(start).Milliseconds(), 500, "SaveVersionChanges: validateChangelogMigrationResult") + } + err = p.saveVersionChangesTx(tx, operationComparisons, versionComparisons) + if err != nil { + return err + } + + var ent entity.BuildEntity + query := tx.Model(&ent). + Where("build_id = ?", publishId). + Set("status = ?", view.StatusComplete). + Set("details = ?", ""). + Set("last_active = now()") + _, err = query.Update() + if err != nil { + return fmt.Errorf("failed to update build entity: %w", err) + } + return nil + }) +} + +func (p publishedRepositoryImpl) saveVersionChangesTx(tx *pg.Tx, operationComparisons []*entity.OperationComparisonEntity, versionComparisons []*entity.VersionComparisonEntity) error { + _, err := tx.Model(&versionComparisons). 
+ OnConflict(`(comparison_id) DO UPDATE + SET operation_types=EXCLUDED.operation_types, + refs = EXCLUDED.refs, + last_active = EXCLUDED.last_active, + no_content = EXCLUDED.no_content, + open_count = version_comparison.open_count+1`).Insert() + if err != nil { + return fmt.Errorf("failed to insert version comparisons %+v: %w", versionComparisons, err) + } + deleteChangelogForComparisonQuery := ` + delete from operation_comparison + where comparison_id = ?comparison_id + ` + for _, comparisonEnt := range versionComparisons { + _, err := tx.Model(comparisonEnt).Exec(deleteChangelogForComparisonQuery) + if err != nil { + return fmt.Errorf("failed to delete old operation changes for comparison %+v: %w", *comparisonEnt, err) + } + } + if len(operationComparisons) != 0 { + _, err = tx.Model(&operationComparisons).Insert() + if err != nil { + return fmt.Errorf("failed to insert operation changes %+v: %w", operationComparisons, err) + } + } + return nil +} + +func (p publishedRepositoryImpl) GetRevisionContent(packageId string, versionName string, revision int) ([]entity.PublishedContentEntity, error) { + var ents []entity.PublishedContentEntity + version, _, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + err = p.cp.GetConnection().Model(&ents). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("revision = ?", revision). + Order("index ASC"). + //Where("deleted_at is ?", nil). // TODO: check that version wasn't deleted or not? + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ents, err +} + +func (p publishedRepositoryImpl) GetLatestContent(packageId string, versionName string, fileId string) (*entity.PublishedContentEntity, error) { + result := new(entity.PublishedContentEntity) + version, revision, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + query := p.cp.GetConnection().Model(result). 
+ Where("package_id = ?", packageId). + Where("version = ?", version). + Where("file_id = ?", fileId) + //Where("deleted_at is ?", nil). // TODO: check that version wasn't deleted or not? + + if revision > 0 { + query.Where("revision = ?", revision) + } else if revision == 0 { + query.Order("revision DESC") + } + err = query.First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetLatestContentBySlug(packageId string, versionName string, slug string) (*entity.PublishedContentEntity, error) { + result := new(entity.PublishedContentEntity) + version, revision, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + + query := p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("slug = ?", slug) + //Where("deleted_at is ?", nil). // TODO: check that version wasn't deleted or not? + if revision > 0 { + query.Where("revision = ?", revision) + } else if revision == 0 { + query.Order("revision DESC") + } + err = query.First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetRevisionContentBySlug(packageId string, versionName string, slug string, revision int) (*entity.PublishedContentEntity, error) { + result := new(entity.PublishedContentEntity) + err := p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("version = ?", versionName). + Where("slug = ?", slug). + Where("revision = ?", revision). + //Where("deleted_at is ?", nil). // TODO: check that version wasn't deleted or not? 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetContentData(packageId string, checksum string) (*entity.PublishedContentDataEntity, error) { + result := new(entity.PublishedContentDataEntity) + err := p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("checksum = ?", checksum). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p *publishedRepositoryImpl) GetLatestContentByVersion(packageId string, versionName string) ([]entity.PublishedContentEntity, error) { + var latestVersionRev entity.PublishedVersionEntity + version, _, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + query := ` + SELECT p.* + FROM ( + SELECT max(revision) over (partition by package_id, version) AS _max_revision, p.* + FROM published_version AS p + WHERE p.package_id = ? + AND p.version = ? + AND p.deleted_at is null + ) p + WHERE p.revision = p._max_revision LIMIT 1;` + _, err = p.cp.GetConnection().Query(&latestVersionRev, query, packageId, version) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return p.GetRevisionContent(packageId, version, latestVersionRev.Revision) +} + +func (p publishedRepositoryImpl) GetVersionSources(packageId string, versionName string, revision int) (*entity.PublishedSrcArchiveEntity, error) { + query := ` + select psa.* + from published_sources_archives psa, published_sources ps + where ps.package_id = ? + and ps.version = ? + and ps.revision = ? 
+	and ps.archive_checksum = psa.checksum
+	limit 1
+	`
+	savedSources := new(entity.PublishedSrcArchiveEntity)
+	// pg.ErrNoRows means no archive is stored for this revision; report that as
+	// (nil, nil) so callers can distinguish "absent" from a real DB error.
+	_, err := p.cp.GetConnection().QueryOne(savedSources, query, packageId, versionName, revision)
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return savedSources, nil
+}
+
+// GetPublishedVersionSourceDataConfig loads the published sources archive data
+// joined with its publish config for one exact package/version/revision.
+// Returns (nil, nil) when no matching row exists.
+func (p publishedRepositoryImpl) GetPublishedVersionSourceDataConfig(packageId string, versionName string, revision int) (*entity.PublishedSrcDataConfigEntity, error) {
+	query := `
+	select psa.checksum as archive_checksum, psa.data, ps.config, ps.package_id
+	from published_sources_archives psa, published_sources ps
+	where ps.package_id = ?
+	and ps.version = ?
+	and ps.revision = ?
+	and ps.archive_checksum = psa.checksum
+	limit 1
+	`
+	savedSources := new(entity.PublishedSrcDataConfigEntity)
+	_, err := p.cp.GetConnection().QueryOne(savedSources, query, packageId, versionName, revision)
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return savedSources, nil
+}
+
+// GetPublishedSources returns the published_sources row for the exact
+// package/version/revision, or (nil, nil) when it does not exist.
+// NOTE(review): versionName is used as-is here (no SplitVersionRevision),
+// unlike some sibling getters — presumably callers pass a plain version;
+// confirm against call sites.
+func (p publishedRepositoryImpl) GetPublishedSources(packageId string, versionName string, revision int) (*entity.PublishedSrcEntity, error) {
+	src := new(entity.PublishedSrcEntity)
+	err := p.cp.GetConnection().Model(src).
+		Where("package_id = ?", packageId).
+		Where("version = ?", versionName).
+		Where("revision = ?", revision).
+		Limit(1).
+		Select()
+	if err != nil {
+		if err == pg.ErrNoRows {
+			return nil, nil
+		}
+		return nil, err
+	}
+	return src, nil
+}
+
+// contentDataExists reports whether content data with this checksum is already
+// stored for the package (runs inside the caller's transaction).
+func (p publishedRepositoryImpl) contentDataExists(tx *pg.Tx, packageId string, checksum string) (bool, error) {
+	result := new(entity.PublishedContentDataEntity)
+	err := tx.Model(result).
+		Where("package_id = ?", packageId).
+		Where("checksum = ?", checksum).
+ First() + if err != nil { + if err == pg.ErrNoRows { + return false, nil + } + return false, err + } + return true, nil +} + +func (p publishedRepositoryImpl) GetRevisionRefs(packageId string, versionName string, revision int) ([]entity.PublishedReferenceEntity, error) { + var ents []entity.PublishedReferenceEntity + version, _, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + err = p.cp.GetConnection().Model(&ents). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("revision = ?", revision). + Where("excluded = false"). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ents, err +} + +func (p publishedRepositoryImpl) GetPackageVersions(packageId string, filter string) ([]entity.PublishedVersionEntity, error) { + var ents []entity.PublishedVersionEntity + + query := p.cp.GetConnection().Model(&ents). + Where("package_id = ?", packageId). + Where("deleted_at is ?", nil) + + if filter != "" { + filter = "%" + utils.LikeEscaped(filter) + "%" + query.Where("version ilike ?", filter) + } + + err := query.Select() + // TODO: try to get latest via query + if err != nil { + return nil, err + } + + result := make([]entity.PublishedVersionEntity, 0) + latestRevNums := make(map[string]int) + latestRevVersions := make(map[string]entity.PublishedVersionEntity) + + for _, version := range ents { + if version.PackageId == packageId && (version.DeletedAt == nil || version.DeletedAt.IsZero()) { + if maxRev, ok := latestRevNums[version.Version]; ok { + if version.Revision > maxRev { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } else { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } + } + for _, v := range latestRevVersions { + result = append(result, v) + } + + sort.Slice(result, func(i, j int) bool { + return result[i].PublishedAt.Unix() > 
result[j].PublishedAt.Unix() + }) + + return result, err +} + +func (p publishedRepositoryImpl) GetVersionsByPreviousVersion(previousPackageId string, previousVersionName string) ([]entity.PublishedVersionEntity, error) { + var ents []entity.PublishedVersionEntity + previousVersion, _, err := SplitVersionRevision(previousVersionName) + if err != nil { + return nil, err + } + + query := ` + select pv.* from published_version pv + inner join ( + select package_id, version, max(revision) as revision + from published_version + group by package_id, version + ) mx + on pv.package_id = mx.package_id + and pv.version = mx.version + and pv.revision = mx.revision + where pv.previous_version_package_id = ? + and pv.previous_version = ? + and pv.deleted_at is null + order by pv.published_at desc + ` + _, err = p.cp.GetConnection().Query(&ents, query, previousPackageId, previousVersion, previousPackageId, previousVersion) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return ents, err +} + +func (p publishedRepositoryImpl) GetPackageVersionsWithLimit(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.PublishedVersionEntity, error) { + var ents []entity.PublishedVersionEntity + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + if searchQuery.Status != "" { + searchQuery.Status = "%" + utils.LikeEscaped(searchQuery.Status) + "%" + } + if checkRevisions { + query := ` + select * from published_version pv + where pv.deleted_at is null + and (pv.package_id = ?package_id) + and (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = any(pv.labels)) + order by pv.published_at desc + ` + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + if 
err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + result := make([]entity.PublishedVersionEntity, 0) + latestRevNums := make(map[string]int) + latestRevVersions := make(map[string]entity.PublishedVersionEntity) + + for _, version := range ents { + if version.PackageId == searchQuery.PackageId && (version.DeletedAt == nil || version.DeletedAt.IsZero()) { + if maxRev, ok := latestRevNums[version.Version]; ok { + if version.Revision > maxRev { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } else { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } + } + for _, v := range latestRevVersions { + result = append(result, v) + } + sort.Slice(result, func(i, j int) bool { + return result[i].PublishedAt.Unix() > result[j].PublishedAt.Unix() + }) + + if len(result) <= searchQuery.Offset { + return make([]entity.PublishedVersionEntity, 0), nil + } else if len(result) <= searchQuery.Limit+searchQuery.Offset { + return result[searchQuery.Offset:], nil + } + return result[searchQuery.Offset : searchQuery.Limit+searchQuery.Offset], nil + } else { + query := ` + select * from published_version pv + inner join ( + select package_id, version, max(revision) as revision + from published_version + where (package_id = ?package_id) + group by package_id, version + ) mx + on pv.package_id = mx.package_id + and pv.version = mx.version + and pv.revision = mx.revision + where (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = any(pv.labels)) + and pv.deleted_at is null + order by pv.published_at desc + limit ?limit + offset ?offset + ` + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + } + 
+ return ents, nil +} + +func (p publishedRepositoryImpl) GetReadonlyPackageVersionsWithLimit_deprecated(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.ReadonlyPublishedVersionEntity_deprecated, error) { + var ents []entity.ReadonlyPublishedVersionEntity_deprecated + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + if searchQuery.Status != "" { + searchQuery.Status = "%" + utils.LikeEscaped(searchQuery.Status) + "%" + } + if searchQuery.SortBy == "" { + searchQuery.SortBy = entity.GetVersionSortByPG(view.VersionSortByCreatedAt) + } + if searchQuery.SortOrder == "" { + searchQuery.SortOrder = entity.GetVersionSortOrderPG(view.VersionSortOrderDesc) + } + if checkRevisions { + query := ` + select pv.*, get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id), pv.previous_version) as previous_version_revision, coalesce(usr.name, pv.created_by) user_name from published_version pv + left join user_data usr + on usr.user_id = pv.created_by + where pv.deleted_at is null + and (pv.package_id = ?package_id) + and (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = any(pv.labels)) + order by pv.published_at desc + ` + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + result := make([]entity.ReadonlyPublishedVersionEntity_deprecated, 0) + latestRevNums := make(map[string]int) + latestRevVersions := make(map[string]entity.ReadonlyPublishedVersionEntity_deprecated) + + for _, version := range ents { + if version.PackageId == searchQuery.PackageId && (version.DeletedAt == nil || version.DeletedAt.IsZero()) { + if maxRev, ok := latestRevNums[version.Version]; ok { + if 
version.Revision > maxRev { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } else { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } + } + for _, v := range latestRevVersions { + result = append(result, v) + } + sort.Slice(result, func(i, j int) bool { + switch searchQuery.SortBy { + case "published_at", "": + switch searchQuery.SortOrder { + case "desc", "": + return result[i].PublishedAt.Unix() > result[j].PublishedAt.Unix() + case "asc": + return result[i].PublishedAt.Unix() < result[j].PublishedAt.Unix() + } + case "version": + switch searchQuery.SortOrder { + case "desc", "": + return result[i].Version > result[j].Version + case "asc": + return result[i].Version < result[j].Version + } + } + return result[i].PublishedAt.Unix() > result[j].PublishedAt.Unix() + }) + + if len(result) <= searchQuery.Offset { + return make([]entity.ReadonlyPublishedVersionEntity_deprecated, 0), nil + } else if len(result) <= searchQuery.Limit+searchQuery.Offset { + return result[searchQuery.Offset:], nil + } + return result[searchQuery.Offset : searchQuery.Limit+searchQuery.Offset], nil + } else { + query := ` + select pv.*, get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id), pv.previous_version) as previous_version_revision, coalesce(usr.name, pv.created_by) user_name from published_version pv + inner join ( + select package_id, version, max(revision) as revision + from published_version + where (package_id = ?package_id) + group by package_id, version + ) mx + on pv.package_id = mx.package_id + and pv.version = mx.version + and pv.revision = mx.revision + left join user_data usr + on usr.user_id = pv.created_by + where (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = 
any(pv.labels)) + and pv.deleted_at is null + order by pv.%s %s + limit ?limit + offset ?offset + ` + _, err := p.cp.GetConnection().Model(&searchQuery). + Query(&ents, fmt.Sprintf(query, searchQuery.SortBy, searchQuery.SortOrder)) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + } + + return ents, nil +} +func (p publishedRepositoryImpl) GetReadonlyPackageVersionsWithLimit(searchQuery entity.PublishedVersionSearchQueryEntity, checkRevisions bool) ([]entity.PackageVersionRevisionEntity, error) { + var ents []entity.PackageVersionRevisionEntity + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + if searchQuery.Status != "" { + searchQuery.Status = "%" + utils.LikeEscaped(searchQuery.Status) + "%" + } + if searchQuery.SortBy == "" { + searchQuery.SortBy = entity.GetVersionSortByPG(view.VersionSortByCreatedAt) + } + if searchQuery.SortOrder == "" { + searchQuery.SortOrder = entity.GetVersionSortOrderPG(view.VersionSortOrderDesc) + } + if checkRevisions { + query := ` + select pv.*, get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id), pv.previous_version) as previous_version_revision, + usr.name as prl_usr_name, usr.email as prl_usr_email, usr.avatar_url as prl_usr_avatar_url, + apikey.id as prl_apikey_id, apikey.name as prl_apikey_name, + case when coalesce(usr.name, apikey.name) is null then pv.created_by else usr.user_id end prl_usr_id + from published_version pv + left join user_data usr on usr.user_id = pv.created_by + left join apihub_api_keys apikey on apikey.id = pv.created_by + where pv.deleted_at is null + and (pv.package_id = ?package_id) + and (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = any(pv.labels)) + order by pv.published_at desc + ` + _, err := 
p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + result := make([]entity.PackageVersionRevisionEntity, 0) + latestRevNums := make(map[string]int) + latestRevVersions := make(map[string]entity.PackageVersionRevisionEntity) + + for _, version := range ents { + if version.PackageId == searchQuery.PackageId && (version.DeletedAt == nil || version.DeletedAt.IsZero()) { + if maxRev, ok := latestRevNums[version.Version]; ok { + if version.Revision > maxRev { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } else { + latestRevNums[version.Version] = version.Revision + latestRevVersions[version.Version] = version + } + } + } + for _, v := range latestRevVersions { + result = append(result, v) + } + sort.Slice(result, func(i, j int) bool { + switch searchQuery.SortBy { + case "published_at", "": + switch searchQuery.SortOrder { + case "desc", "": + return result[i].PublishedAt.Unix() > result[j].PublishedAt.Unix() + case "asc": + return result[i].PublishedAt.Unix() < result[j].PublishedAt.Unix() + } + case "version": + switch searchQuery.SortOrder { + case "desc", "": + return result[i].Version > result[j].Version + case "asc": + return result[i].Version < result[j].Version + } + } + return result[i].PublishedAt.Unix() > result[j].PublishedAt.Unix() + }) + + if len(result) <= searchQuery.Offset { + return make([]entity.PackageVersionRevisionEntity, 0), nil + } else if len(result) <= searchQuery.Limit+searchQuery.Offset { + return result[searchQuery.Offset:], nil + } + return result[searchQuery.Offset : searchQuery.Limit+searchQuery.Offset], nil + } else { + query := ` + select pv.*, get_latest_revision(coalesce(pv.previous_version_package_id,pv.package_id), pv.previous_version) as previous_version_revision, + usr.name as prl_usr_name, usr.email as prl_usr_email, usr.avatar_url as prl_usr_avatar_url, + apikey.id as 
prl_apikey_id, apikey.name as prl_apikey_name, + case when coalesce(usr.name, apikey.name) is null then pv.created_by else usr.user_id end prl_usr_id + from published_version pv + inner join ( + select package_id, version, max(revision) as revision + from published_version + where (package_id = ?package_id) + group by package_id, version + ) mx + on pv.package_id = mx.package_id + and pv.version = mx.version + and pv.revision = mx.revision + left join user_data usr on usr.user_id = pv.created_by + left join apihub_api_keys apikey on apikey.id = pv.created_by + where (?text_filter = '' or pv.version ilike ?text_filter OR EXISTS(SELECT 1 FROM unnest(pv.labels) as label WHERE label ILIKE ?text_filter)) + and (?status = '' or pv.status ilike ?status) + and (?label = '' or ?label = any(pv.labels)) + and pv.deleted_at is null + order by pv.%s %s + limit ?limit + offset ?offset + ` + _, err := p.cp.GetConnection().Model(&searchQuery). + Query(&ents, fmt.Sprintf(query, searchQuery.SortBy, searchQuery.SortOrder)) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + } + + return ents, nil +} + +// GetVersionRefs deprecated +func (p publishedRepositoryImpl) GetVersionRefs(searchQuery entity.PackageVersionSearchQueryEntity) ([]entity.PackageVersionPublishedReference, error) { + var query string + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + } + if searchQuery.ShowAllDescendants { + query = ` + with refs as ( + select distinct reference_id as package_id, reference_version as version, reference_revision as revision from published_version_reference + where package_id = ?package_id + and version = ?version + and revision = ?revision + and excluded = false + ) + select pg.id as package_id, + pg.name as package_name, + pg.kind as kind, + pub_version.version as version, + pub_version.status as version_status, + pub_version.revision as revision, + pub_version.deleted_at, + 
pub_version.deleted_by + from package_group as pg, published_version as pub_version, refs + where refs.package_id = pg.id + and (?text_filter = '' or pg.name ilike ?text_filter) + and (?kind = '' or pg.kind = ?kind) + and refs.package_id = pub_version.package_id + and refs.version = pub_version.version + and refs.revision = pub_version.revision + and not(refs.package_id = ?package_id and refs.version = ?version and refs.revision = ?revision) + offset ?offset + limit ?limit; + ` + } else { + query = ` + with refs as ( + select distinct reference_id as package_id, reference_version as version, reference_revision as revision from published_version_reference + where package_id = ?package_id + and version = ?version + and revision = ?revision + and parent_reference_id = '' + and excluded = false + ) + select pg.id as package_id, + pg.name as package_name, + pg.kind as kind, + pub_version.version as version, + pub_version.status as version_status, + pub_version.revision as revision, + pub_version.deleted_at, + pub_version.deleted_by + from package_group as pg, published_version as pub_version,refs + where pg.id = refs.package_id + and pub_version.package_id = refs.package_id + and pub_version.version = refs.version + and pub_version.revision = refs.revision + and (?text_filter = '' or pg.name ilike ?text_filter) + and (?kind = '' or pg.kind = ?kind) + offset ?offset + limit ?limit;` + } + + var ents []entity.PackageVersionPublishedReference + _, err := p.cp.GetConnection().Model(&searchQuery).Query(&ents, query) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ents, err +} + +func (p publishedRepositoryImpl) GetVersionRefsV3(packageId string, version string, revision int) ([]entity.PublishedReferenceEntity, error) { + var result []entity.PublishedReferenceEntity + err := p.cp.GetConnection().Model(&result). + ColumnExpr("published_version_reference.*"). + Where("package_id = ?", packageId). + Where("version = ?", version). 
+ Where("revision = ?", revision). + Order("published_version_reference.reference_id", + "published_version_reference.reference_version", + "published_version_reference.reference_revision", + "published_version_reference.parent_reference_id", + "published_version_reference.parent_reference_version", + "published_version_reference.parent_reference_revision"). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + + return result, nil +} + +func (p publishedRepositoryImpl) GetRevisionContentWithLimit(packageId string, versionName string, revision int, skipRefs bool, searchQuery entity.PublishedContentSearchQueryEntity) ([]entity.PublishedContentEntity, error) { + var ents []entity.PublishedContentEntity + query := p.cp.GetConnection().Model(&ents). + ColumnExpr("published_version_revision_content.*") + if !skipRefs { + query.Join(`inner join + (with refs as( + select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision + from published_version_reference s + inner join published_version pv + on pv.package_id = s.reference_id + and pv.version = s.reference_version + and pv.revision = s.reference_revision + and pv.deleted_at is null + where s.package_id = ? + and s.version = ? + and s.revision = ? + and s.excluded = false + ) + select package_id, version, revision + from refs + union + select ? as package_id, ? as version, ? as revision + ) refs`, packageId, versionName, revision, packageId, versionName, revision) + query.JoinOn("published_version_revision_content.package_id = refs.package_id"). + JoinOn("published_version_revision_content.version = refs.version"). + JoinOn("published_version_revision_content.revision = refs.revision") + } else { + query.Where("package_id = ?", packageId). + Where("version = ?", versionName). 
+ Where("revision = ?", revision) + } + + if searchQuery.TextFilter != "" { + searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%" + query.Where("title ilike ?", searchQuery.TextFilter) + } + if len(searchQuery.DocumentTypesFilter) > 0 { + query.Where("data_type = any(?)", pg.Array(searchQuery.DocumentTypesFilter)) + } + query.Order("published_version_revision_content.package_id", + "published_version_revision_content.version", + "published_version_revision_content.revision", + "index ASC"). + Offset(searchQuery.Offset). + Limit(searchQuery.Limit) + + err := query.Select() + + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ents, err +} +func (p publishedRepositoryImpl) GetLastVersions(ids []string) ([]entity.PublishedVersionEntity, error) { + if len(ids) == 0 { + return nil, nil + } + var versions []entity.PublishedVersionEntity + selectMaxVersionQuery := ` + SELECT p.* + FROM ( + SELECT max(published_at) over (partition by package_id) AS _max_published_at, p.* + FROM published_version AS p + WHERE package_id IN (?) AND deleted_at is null + ) p + WHERE p.published_at = p._max_published_at;` + _, err := p.cp.GetConnection().Query(&versions, selectMaxVersionQuery, pg.In(ids)) + if err != nil { + if err != pg.ErrNoRows { + return nil, err + } + } + return versions, nil +} + +func (p publishedRepositoryImpl) GetLastVersion(id string) (*entity.PublishedVersionEntity, error) { + version := new(entity.PublishedVersionEntity) + selectMaxVersionQuery := ` + SELECT p.* + FROM ( + SELECT max(published_at) over (partition by package_id) AS _max_published_at, p.* + FROM published_version AS p + WHERE package_id = ? 
AND deleted_at is null + ) p + WHERE p.published_at = p._max_published_at LIMIT 1;` + _, err := p.cp.GetConnection().Query(version, selectMaxVersionQuery, id) + if err != nil { + if err != pg.ErrNoRows { + return nil, err + } + } + return version, nil +} + +func (p publishedRepositoryImpl) GetDefaultVersion(packageId string, status string) (*entity.PublishedVersionEntity, error) { + result := new(entity.PublishedVersionEntity) + query := `with maxrev as + ( + select package_id, version, max(revision) as revision + from published_version + where package_id = ? + group by package_id, version + ) + select * from published_version pv + inner join maxrev + on maxrev.package_id = pv.package_id + and maxrev.version = pv.version + and maxrev.revision = pv.revision + where pv.status = ? and pv.deleted_at is null` + if status == string(view.Release) { + query += ` order by pv.version desc` + } else { + query += ` order by pv.published_at desc` + } + query += ` limit 1;` + _, err := p.cp.GetConnection().QueryOne(result, query, packageId, status) + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p *publishedRepositoryImpl) CleanupDeleted() error { + var ents []entity.PublishedVersionEntity + _, err := p.cp.GetConnection().Model(&ents). + Where("deleted_at is not ?", nil). + Delete() + return err +} + +func (p publishedRepositoryImpl) GetFileSharedInfo(packageId string, slug string, versionName string) (*entity.SharedUrlInfoEntity, error) { + result := new(entity.SharedUrlInfoEntity) + version, _, err := SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + + err = p.cp.GetConnection().Model(result). + Where("package_id = ?", packageId). + Where("version = ?", version). + Where("file_id = ?", slug). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetFileSharedInfoById(sharedId string) (*entity.SharedUrlInfoEntity, error) { + result := entity.SharedUrlInfoEntity{SharedId: sharedId} + err := p.cp.GetConnection().Model(&result). + WherePK(). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return &result, nil +} + +func (p publishedRepositoryImpl) CreateFileSharedInfo(newSharedUrlInfo *entity.SharedUrlInfoEntity) error { + _, err := p.cp.GetConnection().Model(newSharedUrlInfo).Insert() + if err != nil { + if pgErr, ok := err.(pg.Error); ok { + if pgErr.IntegrityViolation() { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GeneratedSharedIdIsNotUnique, + Message: exception.GeneratedSharedIdIsNotUniqueMsg, + } + } + } + return err + } + return nil +} + +func (p publishedRepositoryImpl) CreatePackage(packageEntity *entity.PackageEntity) error { + ctx := context.Background() + err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(packageEntity).OnConflict("(id) DO NOTHING").Insert() + if err != nil { + return err + } + if packageEntity.ServiceName != "" { + insertServiceOwnerQuery := ` + INSERT INTO package_service (workspace_id, package_id, service_name) + VALUES (?, ?, ?) 
+ ON CONFLICT (workspace_id, package_id, service_name) DO NOTHING` + _, err := tx.Exec(insertServiceOwnerQuery, utils.GetPackageWorkspaceId(packageEntity.Id), packageEntity.Id, packageEntity.ServiceName) + if err != nil { + return err + } + } + return err + }) + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) CreatePrivatePackageForUser(packageEntity *entity.PackageEntity, userRoleEntity *entity.PackageMemberRoleEntity) error { + ctx := context.Background() + return p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(packageEntity).Insert() + if err != nil { + return err + } + _, err = tx.Model(userRoleEntity).Insert() + if err != nil { + return err + } + return nil + }) +} + +func (p publishedRepositoryImpl) GetPackage(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := p.cp.GetConnection().Model(result). + Where("id = ?", id). + Where("deleted_at is ?", nil). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetPackageGroup(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := p.cp.GetConnection().Model(result). + Where("id = ?", id). + Where("kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})). + Where("deleted_at is ?", nil). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetDeletedPackage(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := p.cp.GetConnection().Model(result). + Where("id = ?", id). + Where("deleted_at is not ?", nil). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetDeletedPackageGroup(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := p.cp.GetConnection().Model(result). + Where("id = ?", id). + Where("kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})). + Where("deleted_at is not ?", nil). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetPackageIncludingDeleted(id string) (*entity.PackageEntity, error) { + result := new(entity.PackageEntity) + err := p.cp.GetConnection().Model(result). + Where("id = ?", id). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetAllPackageGroups(name string, onlyFavorite bool, userId string) ([]entity.PackageFavEntity, error) { + var result []entity.PackageFavEntity + query := p.cp.GetConnection().Model(&result). + Where("kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})). + Where("deleted_at is ?", nil) + if name != "" { + name = "%" + utils.LikeEscaped(name) + "%" + query.Where("name ilike ?", name) + } + query.Order("parent_id ASC", "name ASC") + + query.ColumnExpr("package_group.*"). + ColumnExpr("fav.user_id as user_id") + if onlyFavorite { + query.Join("INNER JOIN favorite_packages as fav") + } else { + query.Join("FULL OUTER JOIN favorite_packages as fav") + } + query.JoinOn("package_group.id = fav.package_id"). + JoinOn("fav.user_id = ?", userId) + + err := query.Select() + if err != nil { + return nil, err + } + + return result, nil +} + +func (p publishedRepositoryImpl) GetPackagesForPackageGroup(id string) ([]entity.PackageEntity, error) { + var result []entity.PackageEntity + err := p.cp.GetConnection().Model(&result). 
+ Where("parent_id = ?", id). + Where("kind = ?", entity.KIND_PACKAGE). + Where("deleted_at is ?", nil). + Order("name ASC"). + Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetChildPackageGroups(parentId string, name string, onlyFavorite bool, userId string) ([]entity.PackageFavEntity, error) { + var result []entity.PackageFavEntity + query := p.cp.GetConnection().Model(&result). + Where("package_group.kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})). + Where("package_group.deleted_at is ?", nil). + Distinct() + if parentId != "" { + query.Where("package_group.parent_id = ?", parentId) + } else { + query.Where("package_group.parent_id is ?", nil) + } + if name != "" { + name = "%" + utils.LikeEscaped(name) + "%" + query.Where("package_group.name ilike ?", name) + } + query.Order("package_group.parent_id ASC", "package_group.name ASC") + + query.ColumnExpr("package_group.*"). + ColumnExpr("fav.user_id as user_id") + if onlyFavorite { + query.Join("INNER JOIN favorite_packages as fav") + } else { + query.Join("FULL OUTER JOIN favorite_packages as fav") + } + query.JoinOn("package_group.id = fav.package_id"). + JoinOn("fav.user_id = ?", userId) + + query.Join("INNER JOIN project pr"). + JoinOn("pr.id ilike (package_group.id || '%')") + + err := query.Select() + + if err != nil { + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetAllChildPackageIdsIncludingParent(parentId string) ([]string, error) { + var result []string + var ents []entity.PackageIdEntity + + query := `with recursive children as ( + select id from package_group where id=? 
	UNION ALL
	select g.id from package_group g inner join children on children.id = g.parent_id)
	select id from children`
	_, err := p.cp.GetConnection().Query(&ents, query, parentId)
	if err != nil {
		return nil, err
	}
	for _, ent := range ents {
		result = append(result, ent.Id)
	}
	return result, nil
}

// updateExcludeFromSearchForAllChildPackages propagates the parent's
// exclude_from_search flag to every descendant (ids matching 'parentId.%'),
// touching only rows whose flag actually differs.
func (p publishedRepositoryImpl) updateExcludeFromSearchForAllChildPackages(tx *pg.Tx, parentId string, excludeFromSearch bool) error {
	var ents []entity.PackageIdEntity
	query := `update package_group set exclude_from_search = ? where id like ? || '.%' and exclude_from_search != ?`
	_, err := tx.Query(&ents, query, excludeFromSearch, parentId, excludeFromSearch)
	if err != nil {
		return err
	}
	return nil
}

// GetParentPackageGroups returns the chain of non-deleted group/workspace
// ancestors of id, ordered from root to closest parent. Package ids encode
// their ancestry with dots ("root.child.leaf"), so the ancestor ids are
// derived from the id's prefixes; ids without a dot have no parents.
func (p publishedRepositoryImpl) GetParentPackageGroups(id string) ([]entity.PackageEntity, error) {
	var parentIds []string
	var result []entity.PackageEntity

	parts := strings.Split(id, ".")
	if len(parts) == 0 || len(parts) == 1 {
		return result, nil
	}

	// Build cumulative prefixes: "a", "a.b", ... excluding the id itself.
	for i, part := range parts {
		if i == 0 {
			parentIds = append(parentIds, part)
			continue
		}
		if i == (len(parts) - 1) {
			break
		}
		parentIds = append(parentIds, parentIds[i-1]+"."+part)
	}

	err := p.cp.GetConnection().Model(&result).
		Where("kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})).
		Where("deleted_at is ?", nil).
		ColumnExpr("package_group.*").
		// WITH ORDINALITY keeps the result in prefix (root-first) order.
		Join("JOIN UNNEST(?::text[]) WITH ORDINALITY t(id, ord) USING (id)", pg.Array(parentIds)).
		Order("t.ord").
		Select()
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetParentsForPackage is the same ancestor lookup as GetParentPackageGroups
// but without the kind filter, so parents of any kind are returned,
// ordered root-first.
func (p publishedRepositoryImpl) GetParentsForPackage(id string) ([]entity.PackageEntity, error) {
	var parentIds []string
	var result []entity.PackageEntity

	parts := strings.Split(id, ".")
	if len(parts) == 0 || len(parts) == 1 {
		return result, nil
	}

	// Build cumulative dotted prefixes, excluding the id itself.
	for i, part := range parts {
		if i == 0 {
			parentIds = append(parentIds, part)
			continue
		}
		if i == (len(parts) - 1) {
			break
		}
		parentIds = append(parentIds, parentIds[i-1]+"."+part)
	}

	err := p.cp.GetConnection().Model(&result).
		Where("deleted_at is ?", nil).
		ColumnExpr("package_group.*").
		Join("JOIN UNNEST(?::text[]) WITH ORDINALITY t(id, ord) USING (id)", pg.Array(parentIds)).
		Order("t.ord").
		Select()
	if err != nil {
		return nil, err
	}
	return result, nil
}

// UpdatePackage persists the given package entity inside a transaction and
// returns it. See updatePackage for the side effects that are applied along
// with the row update.
func (p publishedRepositoryImpl) UpdatePackage(ent *entity.PackageEntity) (*entity.PackageEntity, error) {
	ctx := context.Background()

	err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		_, err := p.updatePackage(tx, ent)
		if err != nil {
			return err
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	return ent, nil
}

// updatePackage updates the package row within tx, records the package's
// service-name ownership (if any) in package_service, and propagates the
// exclude_from_search flag to all child packages.
func (p publishedRepositoryImpl) updatePackage(tx *pg.Tx, ent *entity.PackageEntity) (*entity.PackageEntity, error) {
	_, err := tx.Model(ent).Where("id = ?", ent.Id).Update()
	if err != nil {
		return nil, err
	}
	if ent.ServiceName != "" {
		// Ownership is unique per (workspace, package, service); re-inserts are no-ops.
		insertServiceOwnerQuery := `
	INSERT INTO package_service (workspace_id, package_id, service_name)
	VALUES (?, ?, ?)
	ON CONFLICT (workspace_id, package_id, service_name) DO NOTHING`
		_, err := tx.Exec(insertServiceOwnerQuery, utils.GetPackageWorkspaceId(ent.Id), ent.Id, ent.ServiceName)
		if err != nil {
			return nil, err
		}
	}
	err = p.updateExcludeFromSearchForAllChildPackages(tx, ent.Id, ent.ExcludeFromSearch)
	if err != nil {
		return nil, err
	}
	return ent, nil
}

// deletePackage soft-deletes a single package within tx: marks all its
// versions deleted, stamps deleted_at/deleted_by, clears the service name and
// removes the package_service ownership rows. Deleting an already-deleted or
// unknown package is a no-op.
func (p publishedRepositoryImpl) deletePackage(tx *pg.Tx, packageId string, userId string) error {
	ent := new(entity.PackageEntity)
	err := tx.Model(ent).
		Where("id = ?", packageId).
		Where("deleted_at is ?", nil).
		First()

	if err != nil {
		if err == pg.ErrNoRows {
			// Nothing to delete - treat as success.
			return nil
		}
		return err
	}

	err = p.markAllVersionsDeletedByPackageId(tx, packageId, userId)
	if err != nil {
		return err
	}

	timeNow := time.Now()
	ent.DeletedAt = &timeNow
	ent.DeletedBy = userId
	ent.ServiceName = "" // service name must be freed for reuse by other packages

	_, err = p.updatePackage(tx, ent)
	if err != nil {
		return err
	}
	err = p.deletePackageServiceOwnership(tx, ent.Id)
	if err != nil {
		return err
	}

	return err
}

// deletePackageServiceOwnership removes all service-name ownership rows of a package.
func (p publishedRepositoryImpl) deletePackageServiceOwnership(tx *pg.Tx, packageId string) error {
	_, err := tx.Exec(`delete from package_service where package_id = ?`, packageId)
	if err != nil {
		return err
	}
	return nil
}

// DeletePackage soft-deletes the package/group with the given id and its whole
// subtree in one transaction.
func (p publishedRepositoryImpl) DeletePackage(id string, userId string) error {
	ctx := context.Background()
	return p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		return p.deleteGroup(tx, id, userId)
	})
}

// deleteGroup recursively soft-deletes a group/workspace: first its child
// groups (recursion) and child packages/dashboards, then the group itself,
// mirroring the bookkeeping done in deletePackage.
func (p publishedRepositoryImpl) deleteGroup(tx *pg.Tx, packageId string, userId string) error {
	ent := new(entity.PackageEntity)
	err := tx.Model(ent).
		Where("id = ?", packageId).
		Where("deleted_at is ?", nil).
		First()

	if err != nil {
		if err == pg.ErrNoRows {
			return nil
		}
		return err
	}

	var children []entity.PackageEntity
	err = tx.Model(&children).
		Where("parent_id = ?", packageId).
		Where("deleted_at is ?", nil).
		Select()
	if err != nil {
		if err != pg.ErrNoRows {
			return err
		}
	}
	for _, child := range children {
		if child.Kind == entity.KIND_GROUP || child.Kind == entity.KIND_WORKSPACE {
			// Recurse into sub-groups first so the whole subtree is handled.
			err := p.deleteGroup(tx, child.Id, userId)
			if err != nil {
				return err
			}
		} else if child.Kind == entity.KIND_PACKAGE || child.Kind == entity.KIND_DASHBOARD {
			err := p.deletePackage(tx, child.Id, userId)
			if err != nil {
				return err
			}
		}
	}

	err = p.markAllVersionsDeletedByPackageId(tx, packageId, userId)
	if err != nil {
		return err
	}

	timeNow := time.Now()
	ent.DeletedAt = &timeNow
	ent.DeletedBy = userId
	ent.ServiceName = "" // free the service name for reuse

	_, err = p.updatePackage(tx, ent)
	if err != nil {
		return err
	}
	err = p.deletePackageServiceOwnership(tx, ent.Id)
	if err != nil {
		return err
	}

	return err
}

// GetPackageGroupsByName returns all non-deleted groups/workspaces with the
// exact given name, ordered by name.
func (p publishedRepositoryImpl) GetPackageGroupsByName(name string) ([]entity.PackageEntity, error) {
	var result []entity.PackageEntity
	err := p.cp.GetConnection().Model(&result).
		Where("name = ?", name).
		Where("kind in (?)", pg.In([]string{entity.KIND_GROUP, entity.KIND_WORKSPACE})).
		Where("deleted_at is ?", nil).
		Order("name ASC").
		Select()
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetFilteredPackages lists non-deleted packages (kind=package), optionally
// matching a name/id substring filter and/or restricted to one parent group.
func (p publishedRepositoryImpl) GetFilteredPackages(filter string, parentId string) ([]entity.PackageEntity, error) {
	var result []entity.PackageEntity
	query := p.cp.GetConnection().Model(&result).
		Where("deleted_at is ?", nil).
		Where("kind = ?", entity.KIND_PACKAGE).
		Order("name ASC")

	if filter != "" {
		// Escape LIKE metacharacters; the filter matches name OR id.
		filter = "%" + utils.LikeEscaped(filter) + "%"
		query.WhereGroup(func(q *pg.Query) (*pg.Query, error) {
			q = q.WhereOr("name ilike ?", filter).WhereOr("id ilike ?", filter)
			return q, nil
		})
	}
	if parentId != "" {
		query.Where("parent_id = ?", parentId)
	}

	err := query.Select()
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetFilteredPackagesWithOffset performs a paged package search driven by
// view.PackageListReq: text filter on name/id, parent scoping (direct children
// or all descendants), kind/service-name/id filters, and optional restriction
// to the user's favorites or packages shared with the user.
func (p publishedRepositoryImpl) GetFilteredPackagesWithOffset(searchReq view.PackageListReq, userId string) ([]entity.PackageEntity, error) {
	var result []entity.PackageEntity
	query := p.cp.GetConnection().Model(&result).
		Where("deleted_at is ?", nil)
	if searchReq.OnlyFavorite {
		query.Join("INNER JOIN favorite_packages as fav").
			JoinOn("package_group.id = fav.package_id").
			JoinOn("fav.user_id = ?", userId)
	}
	if searchReq.OnlyShared {
		query.Join("INNER JOIN package_member_role as mem").
			JoinOn("package_group.id = mem.package_id").
			JoinOn("mem.user_id = ?", userId)
	}
	query.Order("name ASC").
		Offset(searchReq.Offset).
		Limit(searchReq.Limit)

	if searchReq.TextFilter != "" {
		// Escape LIKE metacharacters; the filter matches name OR id.
		searchReq.TextFilter = "%" + utils.LikeEscaped(searchReq.TextFilter) + "%"
		query.WhereGroup(func(q *pg.Query) (*pg.Query, error) {
			q = q.WhereOr("name ilike ?", searchReq.TextFilter).WhereOr("package_group.id ilike ?", searchReq.TextFilter)
			return q, nil
		})
	}
	if searchReq.ParentId != "" {
		if searchReq.ShowAllDescendants {
			// Dotted-id prefix match selects the entire subtree.
			query.Where("package_group.id ilike ?", searchReq.ParentId+".%")
		} else {
			query.Where("parent_id = ?", searchReq.ParentId)
		}
	}

	if len(searchReq.Kind) != 0 {
		query.Where("kind in (?)", pg.In(searchReq.Kind))
	}
	if searchReq.ServiceName != "" {
		query.Where("service_name = ?", searchReq.ServiceName)
	}
	if len(searchReq.Ids) > 0 {
		query.Where("id in (?)", pg.In(searchReq.Ids))
	}

	err := query.Select()
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetPackageForServiceName returns the non-deleted package owning the given
// service name, or (nil, nil) when no package claims it.
func (p publishedRepositoryImpl) GetPackageForServiceName(serviceName string) (*entity.PackageEntity, error) {
	result := new(entity.PackageEntity)
	err := p.cp.GetConnection().Model(result).
		Where("deleted_at is ?", nil).
		Where("service_name = ?", serviceName).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetVersionValidationChanges loads the validation record for one version
// revision with the changelog/bwc data included but the spectral column
// excluded. Returns (nil, nil) when the record does not exist.
func (p publishedRepositoryImpl) GetVersionValidationChanges(packageId string, versionName string, revision int) (*entity.PublishedVersionValidationEntity, error) {
	result := new(entity.PublishedVersionValidationEntity)
	err := p.cp.GetConnection().Model(result).
		ExcludeColumn("spectral").
		Where("package_id = ?", packageId).
		Where("version = ?", versionName).
		Where("revision = ?", revision).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// GetVersionValidationProblems loads the validation record for one version
// revision with only the spectral (linter) data - changelog and bwc columns
// are excluded. Returns (nil, nil) when the record does not exist.
func (p publishedRepositoryImpl) GetVersionValidationProblems(packageId string, versionName string, revision int) (*entity.PublishedVersionValidationEntity, error) {
	result := new(entity.PublishedVersionValidationEntity)
	err := p.cp.GetConnection().Model(result).
		ExcludeColumn("changelog", "bwc").
		Where("package_id = ?", packageId).
		Where("version = ?", versionName).
		Where("revision = ?", revision).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, nil
}

// SplitVersionRevision parses a "name@revision" version string.
// Without an "@" it returns (version, 0, nil) - revision 0 meaning
// "unspecified". With exactly one "@" the suffix must be a positive integer;
// anything else (multiple "@", non-numeric or non-positive revision) yields an
// InvalidRevisionFormat error with revision -1.
func SplitVersionRevision(version string) (string, int, error) {
	if !strings.Contains(version, "@") {
		return version, 0, nil
	}
	versionSplit := strings.Split(version, "@")
	if len(versionSplit) != 2 {
		return "", -1, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidRevisionFormat,
			Message: exception.InvalidRevisionFormatMsg,
			Params:  map[string]interface{}{"version": version},
		}
	}
	versionName := versionSplit[0]
	versionRevisionStr := versionSplit[1]
	versionRevision, err := strconv.Atoi(versionRevisionStr)
	if err != nil {
		return "", -1, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidRevisionFormat,
			Message: exception.InvalidRevisionFormatMsg,
			Params:  map[string]interface{}{"version": version},
			Debug:   err.Error(),
		}
	}
	if versionRevision <= 0 {
		return "", -1, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidRevisionFormat,
			Message: exception.InvalidRevisionFormatMsg,
			Params:  map[string]interface{}{"version": version},
		}
	}
	return versionName, versionRevision, nil
}

// SearchForVersions runs the weighted full-text-ish version search. Rank is a
// sum of per-field weights (name, description, id, service name, version,
// labels) plus status and open-count boosts; only rows with a positive initial
// rank are returned. Mutates searchQuery.TextFilter into a LIKE pattern.
func (p publishedRepositoryImpl) SearchForVersions(searchQuery *entity.PackageSearchQuery) ([]entity.PackageSearchResult, error) {
	searchQuery.TextFilter = "%" +
		utils.LikeEscaped(searchQuery.TextFilter) + "%"
	var result []entity.PackageSearchResult
	// The query ranks max-revision versions (plus older revisions whose labels
	// match the filter) by weighted field matches; named ?placeholders are
	// bound from the searchQuery model.
	versionsSearchQuery := `
	with maxrev as
	(
		select package_id, version, revision, bool_or(s.latest_revision) as latest_revision
		from
		(
			select pv.package_id, pv.version, max(revision) as revision, true as latest_revision
			from published_version pv
			inner join package_group pg
				on pg.id = pv.package_id
				and pg.exclude_from_search = false
			--where (?packages = '{}' or pv.package_id = ANY(?packages))
			/*
			for now packages list serves as a list of parents and packages,
			after adding new parents list need to uncomment line above and change condition below to use parents list
			*/
			where (?packages = '{}' or pv.package_id like ANY(
				select id from unnest(?packages::text[]) id
				union
				select id||'.%' from unnest(?packages::text[]) id))
			and (?versions = '{}' or pv.version = ANY(?versions))
			group by pv.package_id, pv.version
			union
			select pv.package_id, pv.version, max(revision) as revision, false as latest_revision
			from published_version pv
			inner join package_group pg
				on pg.id = pv.package_id
				and pg.exclude_from_search = false
			where (?packages = '{}' or pv.package_id = ANY(?packages))
			and (?versions = '{}' or pv.version = ANY(?versions))
			and array_to_string(pv.labels,',') ilike ?text_filter
			group by pv.package_id, pv.version
		) s
		group by package_id, version, revision
	)
	select
		pkg.id as package_id,
		pkg.name,
		pkg.description,
		pkg.service_name,
		pv.version,
		pv.revision,
		pv.status,
		pv.published_at as created_at,
		pv.labels,
		maxrev.latest_revision,
		parent_package_names(pkg.id) parent_names,
		case
			when init_rank > 0 then init_rank + default_version_tf + version_status_tf + version_open_count
			else 0
		end rank,

		--debug
		coalesce(?open_count_weight) open_count_weight,
		pkg_name_tf,
		pkg_description_tf,
		pkg_id_tf,
		pkg_service_name_tf,
		version_tf,
		version_labels_tf,
		default_version_tf,
		version_status_tf,
		version_open_count
	from
		published_version pv
		inner join maxrev
			on pv.package_id = maxrev.package_id
			and pv.version = maxrev.version
			and pv.revision = maxrev.revision
		inner join package_group pkg
			on pv.package_id = pkg.id
		left join published_version_open_count oc
			on oc.package_id = pv.package_id
			and oc.version = pv.version,
		coalesce(?pkg_name_weight * (pkg.name ilike ?text_filter)::int, 0) pkg_name_tf,
		coalesce(?pkg_description_weight * (pkg.description ilike ?text_filter)::int, 0) pkg_description_tf,
		coalesce(?pkg_id_weight * (pkg.id ilike ?text_filter)::int, 0) pkg_id_tf,
		coalesce(?pkg_service_name_weight * (pkg.service_name ilike ?text_filter)::int, 0) pkg_service_name_tf,
		coalesce(?version_weight * (pv.version ilike ?text_filter)::int, 0) version_tf,
		coalesce(?version_label_weight * (array_to_string(pv.labels,',') ilike ?text_filter)::int, 0) version_labels_tf,
		coalesce(?default_version_weight * (pv.version = pkg.default_released_version)::int, 0) default_version_tf,
		coalesce(pkg_name_tf + pkg_description_tf + pkg_id_tf + pkg_service_name_tf + version_tf + version_labels_tf, 0) init_rank,
		coalesce(
			?version_status_release_weight * (pv.status = ?version_status_release)::int +
			?version_status_draft_weight * (pv.status = ?version_status_draft)::int +
			?version_status_archived_weight * (pv.status = ?version_status_archived)::int) version_status_tf,
		coalesce(?open_count_weight * coalesce(oc.open_count), 0) version_open_count
	where pv.deleted_at is null
	and (?statuses = '{}' or pv.status = ANY(?statuses))
	and pv.published_at >= ?start_date
	and pv.published_at <= ?end_date
	and init_rank > 0
	order by rank desc, created_at desc, version
	limit ?limit
	offset ?offset;
	`
	_, err := p.cp.GetConnection().Model(searchQuery).Query(&result, versionsSearchQuery)
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}

	return result, nil
}

// SearchForDocuments is the document-level counterpart of SearchForVersions:
// it ranks documents of the latest non-deleted version revisions by weighted
// matches on content description, title and labels, boosted by version status
// and document open count. Mutates searchQuery.TextFilter into a LIKE pattern.
func (p publishedRepositoryImpl) SearchForDocuments(searchQuery *entity.DocumentSearchQuery) ([]entity.DocumentSearchResult, error) {
	searchQuery.TextFilter = "%" + utils.LikeEscaped(searchQuery.TextFilter) + "%"
	var result []entity.DocumentSearchResult
	documentsSearchQuery := `
	with maxrev as
	(
		select pv.package_id, pv.version, max(revision) as revision
		from published_version pv
		inner join package_group pg
			on pg.id = pv.package_id
			and pg.exclude_from_search = false
		--where (?packages = '{}' or pv.package_id = ANY(?packages))
		/*
		for now packages list serves as a list of parents and packages,
		after adding new parents list need to uncomment line above and change condition below to use parents list
		*/
		where (?packages = '{}' or pv.package_id like ANY(
			select id from unnest(?packages::text[]) id
			union
			select id||'.%' from unnest(?packages::text[]) id))
		and (?versions = '{}' or pv.version = ANY(?versions))
		group by pv.package_id, pv.version
	),
	versions as
	(
		select pv.package_id, pv.version, pv.revision, pv.published_at, pv.status
		from published_version pv
		inner join maxrev
			on pv.package_id = maxrev.package_id
			and pv.version = maxrev.version
			and pv.revision = maxrev.revision
		where pv.deleted_at is null
		and (?statuses = '{}' or pv.status = ANY(?statuses))
		and pv.published_at >= ?start_date
		and pv.published_at <= ?end_date
	)
	select
		pg.id as package_id,
		pg.name,
		v.version,
		v.revision,
		v.status,
		v.published_at as created_at,
		c.slug,
		c.title,
		c.data_type as type,
		c.metadata,
		parent_package_names(pg.id) parent_names,
		case
			when init_rank > 0 then init_rank + version_status_tf + document_open_count
			else 0
		end rank,

		--debug
		coalesce(?open_count_weight) open_count_weight,
		content_tf,
		title_tf,
		labels_tf,
		version_status_tf,
		document_open_count
	from published_version_revision_content c
		inner join package_group pg
			on pg.id = c.package_id
		inner join versions v
			on v.package_id = c.package_id
			and v.version = c.version
			and v.revision = c.revision
		left join published_document_open_count oc
			on oc.package_id = c.package_id
			and oc.version = c.version
			and oc.slug = c.slug,
		coalesce(?content_weight * case when c.data_type = ANY(?unknown_types) then 0
			else (c.metadata->>'description' ilike ?text_filter)::int end, 0) content_tf,
		coalesce(?title_weight * (c.title ilike ?text_filter)::int, 0) title_tf,
		coalesce(?labels_weight * (c.metadata->>'labels' ilike ?text_filter)::int, 0) labels_tf,
		coalesce(content_tf + title_tf + labels_tf, 0) init_rank,
		coalesce(
			?version_status_release_weight * (v.status = ?version_status_release)::int +
			?version_status_draft_weight * (v.status = ?version_status_draft)::int +
			?version_status_archived_weight * (v.status = ?version_status_archived)::int) version_status_tf,
		coalesce(?open_count_weight * coalesce(oc.open_count), 0) document_open_count
	where init_rank > 0
	order by rank desc, v.published_at desc, c.file_id, c.index asc
	limit ?limit
	offset ?offset;
	`
	_, err := p.cp.GetConnection().Model(searchQuery).Query(&result, documentsSearchQuery)
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}

	return result, nil
}

// RecalculatePackageOperationGroups drops all autogenerated operation groups
// of a package and rebuilds them for every version from the current grouping
// prefix regexes, in a single transaction.
func (p publishedRepositoryImpl) RecalculatePackageOperationGroups(packageId string, restGroupingPrefixRegex string, graphqlGroupingPrefixRegex string, userId string) error {
	ctx := context.Background()

	err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		_, err := tx.Exec(`delete from operation_group where package_id = ?
and autogenerated = true`, packageId) + if err != nil { + return fmt.Errorf("failed to delete autogenerated groups for package %v from operation_group: %w", packageId, err) + } + err = p.recalculateOperationsGroupsTx(tx, packageId, "", 0, restGroupingPrefixRegex, graphqlGroupingPrefixRegex, userId) + if err != nil { + return fmt.Errorf("failed to insert groups for package %v: %w", packageId, err) + } + return nil + }) + if err != nil { + return fmt.Errorf("failed to recalculate package operations groups: %w", err) + } + return nil +} + +func (p publishedRepositoryImpl) RecalculateOperationGroups(packageId string, version string, revision int, restGroupingPrefixRegex string, graphqlGroupingPrefixRegex string, userId string) error { + ctx := context.Background() + + return p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + return p.recalculateOperationsGroupsTx(tx, packageId, version, revision, restGroupingPrefixRegex, graphqlGroupingPrefixRegex, userId) + }) +} + +func (p publishedRepositoryImpl) recalculateOperationsGroupsTx(tx *pg.Tx, packageId string, version string, revision int, restGroupingPrefixRegex string, graphqlGroupingPrefixRegex string, userId string) error { + if restGroupingPrefixRegex == "" && graphqlGroupingPrefixRegex == "" { + return nil + } + if version != "" && revision != 0 { + _, err := tx.Exec(`delete from operation_group where package_id = ? and version = ? and revision = ? and autogenerated = true`, packageId, version, revision) + if err != nil { + return fmt.Errorf("failed to delete autogenerated groups for package %v version %v revision %v from operation_group: %w", packageId, version, revision, err) + } + } + var operationGroups []entity.OperationGroupEntity + operationGroupsQuery := ` + select groups.*, og.template_checksum, og.template_filename, og.description from ( + select distinct + package_id, + version, + revision, + case + when type = 'rest' + then case when ? 
= '' then null else substring(metadata ->> 'path', ?) end + when type = 'graphql' + then case when ? = '' then null else substring(metadata ->> 'method', ?) end + end group_name, + type api_type, + true autogenerated + from operation + where + package_id = ? + and (? = '' or version = ?) + and (? = 0 or revision = ?) + ) groups + left join operation_group og + on og.package_id = groups.package_id + and og.version = groups.version + and og.revision = (groups.revision - 1) + and og.group_name = groups.group_name + and og.api_type = groups.api_type + and og.autogenerated = true + where groups.group_name is not null and groups.group_name != '';` + _, err := tx.Query(&operationGroups, operationGroupsQuery, + restGroupingPrefixRegex, restGroupingPrefixRegex, + graphqlGroupingPrefixRegex, graphqlGroupingPrefixRegex, + packageId, + version, version, + revision, revision) + if err != nil { + return fmt.Errorf("failed to calculate autogenerated groups %+v: %w", operationGroups, err) + } + if len(operationGroups) == 0 { + return nil + } + + for i, group := range operationGroups { + operationGroups[i].GroupId = view.MakeOperationGroupId(group.PackageId, group.Version, group.Revision, group.ApiType, group.GroupName) + } + + //delete manually created groups with the same PK as autogenerated groups + deleteManualGroupsQuery := tx.Model(&operationGroups).Returning("operation_group_entity.*") + var deletedManualGroups []entity.OperationGroupEntity + err = tx.Model(&deletedManualGroups).WithDelete("operation_group", deleteManualGroupsQuery).Select() + if err != nil { + return fmt.Errorf("failed to delete not-autogenerated groups %+v: %w", operationGroups, err) + } + deletedGroupsHistory := make([]entity.OperationGroupHistoryEntity, len(deletedManualGroups)) + for _, deletedManualGroup := range deletedManualGroups { + deletedGroupsHistory = append(deletedGroupsHistory, entity.OperationGroupHistoryEntity{ + GroupId: deletedManualGroup.GroupId, + Action: 
view.OperationGroupActionDelete, + Data: deletedManualGroup, + UserId: userId, + Date: time.Now(), + Automatic: true, + }) + } + if len(deletedGroupsHistory) > 0 { + _, err = tx.Model(&deletedGroupsHistory).Insert() + if err != nil { + return err + } + } + _, err = tx.Model(&operationGroups). + OnConflict(` + (package_id, version, revision, api_type, group_name) DO UPDATE + SET autogenerated = EXCLUDED.autogenerated, + description = EXCLUDED.description, + template_checksum = EXCLUDED.template_checksum, + template_filename = EXCLUDED.template_filename`). + Insert() + if err != nil { + return fmt.Errorf("failed to insert autogenerated groups %+v: %w", operationGroups, err) + } + + insertGroupedOperationsQuery := ` + insert into grouped_operation + select ?, package_id, version, revision, operation_id from ( + select * from ( + select distinct + package_id, + version, + revision, + case + when type = 'rest' + then case when ? = '' then null else substring(metadata ->> 'path', ?) end + when type = 'graphql' + then case when ? = '' then null else substring(metadata ->> 'method', ?) end + end group_name, + operation_id + from operation + where + package_id = ? + and version = ? + and revision = ? + and type = ? + ) groups + where group_name = ? + ) filtered_groups;` + + for _, group := range operationGroups { + _, err = tx.Exec(insertGroupedOperationsQuery, + group.GroupId, + restGroupingPrefixRegex, restGroupingPrefixRegex, + graphqlGroupingPrefixRegex, graphqlGroupingPrefixRegex, + group.PackageId, + group.Version, + group.Revision, + group.ApiType, + group.GroupName) + if err != nil { + return fmt.Errorf("failed to insert autogenerated grouped operations for group %+v: %w", group, err) + } + } + return nil +} + +func (p publishedRepositoryImpl) GetVersionComparison(comparisonId string) (*entity.VersionComparisonEntity, error) { + comparison := new(entity.VersionComparisonEntity) + err := p.cp.GetConnection(). + Model(comparison). 
		Where("comparison_id = ?", comparisonId).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return comparison, nil
}

// GetVersionRefsComparisons returns the comparisons referenced (via the refs
// array column) by the given comparison. Missing parent yields an empty slice.
func (p publishedRepositoryImpl) GetVersionRefsComparisons(comparisonId string) ([]entity.VersionComparisonEntity, error) {
	comparisons := make([]entity.VersionComparisonEntity, 0)
	err := p.cp.GetConnection().
		Model(&comparisons).
		Where("comparison_id in (select unnest(refs) from version_comparison where comparison_id = ?)", comparisonId).
		Select()
	if err != nil {
		return nil, err
	}
	return comparisons, nil
}

// SaveTransformedDocument stores the result of a document-transformation build
// and marks the build complete, all in one transaction. The build row is
// locked first (getBuildWithLock) and the publish is rejected if the build is
// already complete, or errored with fewer than 2 restarts (such builds may
// still be retried by their original runner).
func (p publishedRepositoryImpl) SaveTransformedDocument(data *entity.TransformedContentDataEntity, publishId string) error {
	ctx := context.Background()
	err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		var ents []entity.BuildEntity
		_, err := tx.Query(&ents, getBuildWithLock, publishId)
		if err != nil {
			return fmt.Errorf("SaveTransformedDocument: failed to get build %s: %w", publishId, err)
		}
		if len(ents) == 0 {
			return fmt.Errorf("SaveTransformedDocument: failed to start doc transformation publish. Build with buildId='%s' is not found", publishId)
		}
		build := &ents[0]

		//do not allow publish for "complete" builds and builds that are not failed with "Restart count exceeded limit"
		if build.Status == string(view.StatusComplete) ||
			(build.Status == string(view.StatusError) && build.RestartCount < 2) {
			return fmt.Errorf("failed to start document transformation. Build with buildId='%v' is already published or failed", publishId)
		}

		// Upsert keyed by the full transformation identity.
		_, err = tx.Model(data).OnConflict("(package_id, version, revision, api_type, group_id, build_type, format) DO UPDATE").Insert()
		if err != nil {
			return fmt.Errorf("failed to insert published_data %+v: %w", data, err)
		}
		var ent entity.BuildEntity
		query := tx.Model(&ent).
			Where("build_id = ?", publishId).
			Set("status = ?", view.StatusComplete).
			Set("details = ?", "").
			Set("last_active = now()")
		_, err = query.Update()
		if err != nil {
			return fmt.Errorf("failed to update build entity: %w", err)
		}
		return nil
	})
	return err
}

// GetTransformedDocuments loads the stored transformation result for the given
// package version (optionally "name@revision"), api type, group, build type
// and format. Returns (nil, nil) when nothing was stored.
func (p publishedRepositoryImpl) GetTransformedDocuments(packageId string, version string, apiType string, groupId string, buildType string, format string) (*entity.TransformedContentDataEntity, error) {
	result := new(entity.TransformedContentDataEntity)
	version, revision, err := SplitVersionRevision(version)
	if err != nil {
		return nil, err
	}
	err = p.cp.GetConnection().Model(result).
		Where("package_id = ?", packageId).
		Where("version = ?", version).
		Where("revision = ?", revision).
		Where("api_type = ?", apiType).
		Where("group_id = ?", groupId).
		Where("build_type = ?", buildType).
		Where("format = ?", format).
		First()
	if err != nil {
		if err == pg.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}
	return result, err
}

// DeleteTransformedDocuments removes all stored transformation results for the
// given version revision / api type / group.
func (p publishedRepositoryImpl) DeleteTransformedDocuments(packageId string, version string, revision int, apiType string, groupId string) error {
	ctx := context.Background()
	return p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error {
		query := `
		delete from transformed_content_data
		where package_id = ? and version = ? and revision = ? and api_type = ? and group_id = ?`
		_, err := tx.Exec(query, packageId, version, revision, apiType, groupId)
		return err
	})
}

// GetVersionRevisionContentForDocumentsTransformation pages through the
// documents (content + data) of a version revision and of all its non-deleted,
// non-excluded references, optionally narrowed by document type and/or
// operation group membership.
func (p publishedRepositoryImpl) GetVersionRevisionContentForDocumentsTransformation(packageId string, versionName string, revision int, searchQuery entity.ContentForDocumentsTransformationSearchQueryEntity) ([]entity.PublishedContentWithDataEntity, error) {
	var ents []entity.PublishedContentWithDataEntity
	query := p.cp.GetConnection().Model(&ents).Distinct().
		ColumnExpr("published_version_revision_content.*").ColumnExpr("pd.*")
	// The inner query yields the target revision itself plus all of its
	// included (excluded = false) references whose versions are not deleted.
	query.Join(`inner join
	(with refs as(
		select s.reference_id as package_id, s.reference_version as version, s.reference_revision as revision
		from published_version_reference s
		inner join published_version pv
			on pv.package_id = s.reference_id
			and pv.version = s.reference_version
			and pv.revision = s.reference_revision
			and pv.deleted_at is null
		where s.package_id = ?
		and s.version = ?
		and s.revision = ?
		and s.excluded = false
	)
	select package_id, version, revision
	from refs
	union
	select ? as package_id, ? as version, ? as revision
	) refs`, packageId, versionName, revision, packageId, versionName, revision)
	query.JoinOn("published_version_revision_content.package_id = refs.package_id").
		JoinOn("published_version_revision_content.version = refs.version").
		JoinOn("published_version_revision_content.revision = refs.revision")

	// Attach the raw document data via its content checksum.
	query.Join("inner join published_data as pd").
		JoinOn("published_version_revision_content.package_id = pd.package_id").
		JoinOn("published_version_revision_content.checksum = pd.checksum")

	if len(searchQuery.DocumentTypesFilter) > 0 {
		query.Where("data_type = any(?)", pg.Array(searchQuery.DocumentTypesFilter))
	}

	if searchQuery.OperationGroup != "" {
		// Keep only documents containing at least one operation of the group.
		query.Join(`inner join grouped_operation as go
			on go.operation_id = any(published_version_revision_content.operation_ids)
			and published_version_revision_content.package_id = go.package_id
			and published_version_revision_content.version = go.version
			and published_version_revision_content.revision = go.revision
			and go.group_id = ?`, searchQuery.OperationGroup)
	}

	query.Order("published_version_revision_content.package_id",
		"published_version_revision_content.version",
		"published_version_revision_content.revision",
		"index ASC").
		Offset(searchQuery.Offset).
+ Limit(searchQuery.Limit) + + err := query.Select() + + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ents, err +} + +func (p publishedRepositoryImpl) GetPublishedSourcesArchives(offset int) (*entity.PublishedSrcArchiveEntity, error) { + result := new(entity.PublishedSrcArchiveEntity) + err := p.cp.GetConnection().Model(result).Offset(offset).Limit(1). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) DeletePublishedSourcesArchives(checksums []string) error { + ctx := context.Background() + var deletedRows int + err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + query := `delete from published_sources_archives + where checksum in (?)` + result, err := tx.Exec(query, pg.In(checksums)) + if err != nil { + return err + } + deletedRows += result.RowsAffected() + return nil + }) + + if deletedRows > 0 { + _, err = p.cp.GetConnection().Exec("vacuum full published_sources_archives") + if err != nil { + return errors.Wrap(err, "failed to run vacuum for table published_sources_archives") + } + } + return nil +} + +func (p publishedRepositoryImpl) SavePublishedSourcesArchive(ent *entity.PublishedSrcArchiveEntity) error { + ctx := context.Background() + err := p.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(ent).OnConflict("(checksum) DO NOTHING").Insert() + if err != nil { + return fmt.Errorf("failed to insert published_sources_archive %+v: %w", ent, err) + } + return nil + }) + return err +} + +func (p publishedRepositoryImpl) DeleteDraftVersionsBeforeDate_deprecated(packageId string, date time.Time, userId string) (int, error) { + limit, page, deletedItems := 100, 0, 0 + for { + ents, err := p.GetReadonlyPackageVersionsWithLimit_deprecated(entity.PublishedVersionSearchQueryEntity{ + PackageId: packageId, + Status: string(view.Draft), + Limit: limit, + 
Offset: limit * page, + }, false) + if err != nil { + return 0, err + } + if len(ents) == 0 { + return deletedItems, nil + } + + for _, v := range ents { + if v.PublishedAt.Before(date) { // set id of clean up job in deleted_by column + err = p.MarkVersionDeleted(packageId, v.Version, userId) + if err != nil { + return deletedItems, err + } + deletedItems++ + } + } + page++ + } +} + +func (p publishedRepositoryImpl) DeleteDraftVersionsBeforeDate(packageId string, date time.Time, userId string) (int, error) { + limit, page, deletedItems := 100, 0, 0 + for { + ents, err := p.GetReadonlyPackageVersionsWithLimit(entity.PublishedVersionSearchQueryEntity{ + PackageId: packageId, + Status: string(view.Draft), + Limit: limit, + Offset: limit * page, + }, false) + if err != nil { + return 0, err + } + if len(ents) == 0 { + return deletedItems, nil + } + + for _, v := range ents { + if v.PublishedAt.Before(date) { // set id of clean up job in deleted_by column + err = p.MarkVersionDeleted(packageId, v.Version, userId) + if err != nil { + return deletedItems, err + } + deletedItems++ + } + } + page++ + } +} + +type PublishedBuildChangesOverview map[string]int + +func (p PublishedBuildChangesOverview) setUnexpectedEntry(table string) { + p[fmt.Sprintf("%v.%v", table, "Unexpected")] = 1 +} + +func (p PublishedBuildChangesOverview) setNotFoundEntry(table string) { + p[fmt.Sprintf("%v.%v", table, "NotFound")] = 1 +} + +func (p PublishedBuildChangesOverview) setTableChanges(table string, changesMap map[string]interface{}) { + for key := range changesMap { + p[fmt.Sprintf("%v.%v", table, key)] = 1 + } +} + +func (p PublishedBuildChangesOverview) getUniqueChanges() []string { + keys := make([]string, 0) + for key := range p { + keys = append(keys, key) + } + return keys +} + +func (p publishedRepositoryImpl) GetPublishedVersionsHistory(filter view.PublishedVersionHistoryFilter) ([]entity.PackageVersionHistoryEntity, error) { + result := make([]entity.PackageVersionHistoryEntity, 0) 
+ + // query := p.cp.GetConnection().Model(&result) + // if filter.PublishedAfter != nil { + // query.Where("published_version.published_at >= ?", *filter.PublishedAfter) + // } + // if filter.PublishedBefore != nil { + // query.Where("published_version.published_at <= ?", *filter.PublishedBefore) + // } + // if filter.Status != nil { + // query.Where("published_version.status = ?", *filter.Status) + // } + // query.ColumnExpr("published_version.*, coalesce(o.api_types,'{}') api_types"). + // Where("deleted_at is null"). + // Join(`left join ( + // select package_id, version, revision, array_agg(distinct type) api_types + // from operation + // group by package_id, version, revision + // ) o`). + // JoinOn("o.package_id = published_version.package_id"). + // JoinOn("o.version = published_version.version"). + // JoinOn("o.revision = published_version.revision"). + // Order("published_version.published_at asc", "published_version.package_id", "published_version.version", "published_version.revision"). + // Limit(filter.Limit). + // Offset(filter.Limit * filter.Page) + _, err := p.cp.GetConnection().Query(&result, ` + with publications as( + select published_version.package_id, + published_version.version, + published_version.revision, + status, + published_version.published_at, + previous_version_package_id, + previous_version + from published_version + where deleted_at is null + and (? is null or status = ?) + and (? is null or published_at >= ?) + and (? is null or published_at <= ?) + order by published_at asc, package_id, version, revision + limit ? + offset ? 
+ ), + ops as ( + select o.package_id, o.version, o.revision, array_agg(distinct o.type) api_types + from operation o + inner join publications p + on o.package_id = p.package_id + and o.version = p.version + and o.revision = p.revision + group by o.package_id, o.version, o.revision + ) + select + p.*, coalesce(api_types,'{}') api_types + from publications p + left join ops o + on o.package_id = p.package_id + and o.version = p.version + and o.revision = p.revision; + `, filter.Status, filter.Status, + filter.PublishedAfter, filter.PublishedAfter, + filter.PublishedBefore, filter.PublishedBefore, + filter.Limit, filter.Limit*filter.Page, + ) + if err != nil { + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) StoreOperationGroupPublishProcess(ent *entity.OperationGroupPublishEntity) error { + _, err := p.cp.GetConnection().Model(ent).Insert() + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) UpdateOperationGroupPublishProcess(ent *entity.OperationGroupPublishEntity) error { + _, err := p.cp.GetConnection().Model(ent). + WherePK(). + Set("details = ?details"). + Set("status = ?status"). + Update() + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) GetOperationGroupPublishProcess(publishId string) (*entity.OperationGroupPublishEntity, error) { + result := new(entity.OperationGroupPublishEntity) + err := p.cp.GetConnection().Model(result). + Where("publish_id = ?", publishId). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) StoreCSVDashboardPublishProcess(ent *entity.CSVDashboardPublishEntity) error { + _, err := p.cp.GetConnection().Model(ent).Insert() + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) UpdateCSVDashboardPublishProcess(ent *entity.CSVDashboardPublishEntity) error { + _, err := p.cp.GetConnection().Model(ent). + WherePK(). + Set("message = ?message"). + Set("status = ?status"). + Set("csv_report = ?csv_report"). + Update() + if err != nil { + return err + } + return nil +} + +func (p publishedRepositoryImpl) GetCSVDashboardPublishProcess(publishId string) (*entity.CSVDashboardPublishEntity, error) { + result := new(entity.CSVDashboardPublishEntity) + err := p.cp.GetConnection().Model(result). + ExcludeColumn("csv_report"). + Where("publish_id = ?", publishId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (p publishedRepositoryImpl) GetCSVDashboardPublishReport(publishId string) (*entity.CSVDashboardPublishEntity, error) { + result := new(entity.CSVDashboardPublishEntity) + err := p.cp.GetConnection().Model(result). + Where("publish_id = ?", publishId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} diff --git a/qubership-apihub-service/repository/RoleRepository.go b/qubership-apihub-service/repository/RoleRepository.go new file mode 100644 index 0000000..a2b6edc --- /dev/null +++ b/qubership-apihub-service/repository/RoleRepository.go @@ -0,0 +1,507 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "context" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/go-pg/pg/v10" +) + +type RoleRepository interface { + AddPackageMemberRoles(entities []entity.PackageMemberRoleEntity) error + DeleteDirectPackageMember(packageId string, userId string) error + GetDirectPackageMembers(packageId string) ([]entity.PackageMemberRoleEntity, error) + GetDirectPackageMember(packageId string, userId string) (*entity.PackageMemberRoleEntity, error) + RemoveRoleFromPackageMember(packageId string, userId string, roleId string) error + GetPackageRolesHierarchyForUser(packageId string, userId string) ([]entity.PackageMemberRoleRichEntity, error) + GetPackageHierarchyMembers(packageId string) ([]entity.PackageMemberRoleRichEntity, error) + GetAvailablePackageRoles(packageId string, userId string) ([]entity.RoleEntity, error) + GetUserSystemRole(userId string) (*entity.SystemRoleEntity, error) + SetUserSystemRole(userId string, role string) error + DeleteUserSystemRole(userId string) error + GetAllRoles() ([]entity.RoleEntity, error) + CreateRole(roleEntity entity.RoleEntity) error + UpdateRolePermissions(roleId string, permissions []string) error + DeleteRole(roleId string) error + GetRole(roleId string) (*entity.RoleEntity, error) + GetPermissionsForRoles(roles []string) ([]string, error) + GetUserPermissions(packageId string, userId string) ([]string, error) + SetRoleRanks(entities 
[]entity.RoleEntity) error + GetUsersBySystemRole(systemRole string) ([]entity.UserEntity, error) +} + +func NewRoleRepository(cp db.ConnectionProvider) RoleRepository { + return &roleRepositoryImpl{cp: cp} +} + +type roleRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (r roleRepositoryImpl) AddPackageMemberRoles(entities []entity.PackageMemberRoleEntity) error { + if len(entities) == 0 { + return nil + } + ctx := context.Background() + return r.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(&entities). + OnConflict(` + (package_id, user_id) do update + set updated_by = excluded.updated_by, + updated_at = excluded.updated_at, + roles = array(select distinct unnest(package_member_role.roles || excluded.roles))`). + Insert() + if err != nil { + return err + } + //user is not allowed to have the same role for parent and children package + removeDuplicateInheritedRolesQuery := ` + update package_member_role + set roles = + ( + SELECT array + ( + SELECT unnest(roles) + EXCEPT + select unnest(roles) from package_member_role where user_id = ? and package_id = ? + ) + ) + where user_id = ? + and package_id like ? || '.%'; + ` + for _, ent := range entities { + _, err = tx.Exec(removeDuplicateInheritedRolesQuery, ent.UserId, ent.PackageId, ent.UserId, ent.PackageId) + if err != nil { + return err + } + } + return r.deleteMembersWithEmptyRoles(tx) + }) +} + +func (r roleRepositoryImpl) DeleteDirectPackageMember(packageId string, userId string) error { + ent := new(entity.PackageMemberRoleEntity) + _, err := r.cp.GetConnection().Model(ent). + Where("package_id = ?", packageId). + Where("user_id = ?", userId). + Delete() + if err != nil { + return err + } + return nil +} + +func (r roleRepositoryImpl) GetDirectPackageMember(packageId string, userId string) (*entity.PackageMemberRoleEntity, error) { + result := new(entity.PackageMemberRoleEntity) + err := r.cp.GetConnection().Model(result). 
+ Where("package_id = ?", packageId). + Where("user_id = ?", userId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) RemoveRoleFromPackageMember(packageId string, userId string, roleId string) error { + ctx := context.Background() + return r.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + removeRoleFromPackageMemberQuery := ` + update package_member_role set roles = array_remove(roles, ?) + where package_id = ? + and user_id = ?;` + _, err := tx.Exec(removeRoleFromPackageMemberQuery, roleId, packageId, userId) + if err != nil { + return err + } + return r.deleteMembersWithEmptyRoles(tx) + }) +} + +func (r roleRepositoryImpl) GetDirectPackageMembers(packageId string) ([]entity.PackageMemberRoleEntity, error) { + var result []entity.PackageMemberRoleEntity + err := r.cp.GetConnection().Model(&result). + Where("package_id = ?", packageId). + Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) deleteMembersWithEmptyRoles(tx *pg.Tx) error { + deleteMembersWithEmptyRolesQuery := `delete from package_member_role where roles = ARRAY[]::varchar[];` + _, err := tx.Exec(deleteMembersWithEmptyRolesQuery) + if err != nil { + return err + } + return nil +} + +func (r roleRepositoryImpl) GetPackageRolesHierarchyForUser(packageId string, userId string) ([]entity.PackageMemberRoleRichEntity, error) { + var result []entity.PackageMemberRoleRichEntity + if packageId == "" { + return nil, nil + } + packageIds := make([]string, 0) + parts := strings.Split(packageId, ".") + if len(parts) > 1 { + for i, part := range parts { + if i == 0 { + packageIds = append(packageIds, part) + continue + } + if i == (len(parts) - 1) { + break + } + packageIds = append(packageIds, packageIds[i-1]+"."+part) + } + } + packageIds = append([]string{packageId}, packageIds...) 
+ //using unnest to sort result by packageIds array + query := ` + select pg.id package_id, pg.kind package_kind, pg.name package_name, u.user_id, u.name user_name, u.email user_email, u.avatar_url user_avatar, role.id as role_id, role.role as role + from + package_member_role p, + package_group pg, + user_data u, + role, + UNNEST(?::text[]) WITH ORDINALITY t(package_id, ord), + UNNEST(p.roles) roles(role) + where t.package_id = p.package_id + and p.package_id=pg.id + and p.user_id = ? + and p.user_id = u.user_id + and role.id = roles.role + order by t.ord; + ` + _, err := r.cp.GetConnection().Query(&result, query, pg.Array(packageIds), userId) + if err != nil { + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) GetPackageHierarchyMembers(packageId string) ([]entity.PackageMemberRoleRichEntity, error) { + var result []entity.PackageMemberRoleRichEntity + if packageId == "" { + return nil, nil + } + packageIds := make([]string, 0) + parts := strings.Split(packageId, ".") + if len(parts) > 1 { + for i, part := range parts { + if i == 0 { + packageIds = append(packageIds, part) + continue + } + if i == (len(parts) - 1) { + break + } + packageIds = append(packageIds, packageIds[i-1]+"."+part) + } + } + packageIds = append([]string{packageId}, packageIds...) 
+ //using unnest to sort result by packageIds array + query := ` + select pg.id package_id, pg.kind package_kind, pg.name package_name, u.user_id, u.name user_name, u.email user_email, u.avatar_url user_avatar, role.id as role_id, role.role as role + from + package_member_role p, + package_group pg, + user_data u, + role, + UNNEST(?::text[]) WITH ORDINALITY t(package_id, ord), + UNNEST(p.roles) roles(role) + where t.package_id = p.package_id + and p.package_id=pg.id + and p.user_id = u.user_id + and role.id = roles.role + order by t.ord; + ` + _, err := r.cp.GetConnection().Query(&result, query, pg.Array(packageIds)) + if err != nil { + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) GetAvailablePackageRoles(packageId string, userId string) ([]entity.RoleEntity, error) { + var result []entity.RoleEntity + if packageId == "" { + return nil, nil + } + packageIds := make([]string, 0) + parts := strings.Split(packageId, ".") + if len(parts) > 1 { + for i, part := range parts { + if i == 0 { + packageIds = append(packageIds, part) + continue + } + if i == (len(parts) - 1) { + break + } + packageIds = append(packageIds, packageIds[i-1]+"."+part) + } + } + packageIds = append([]string{packageId}, packageIds...) + query := ` + select distinct * + from role + where rank <= ( + select max(rank) from role where id in + ( + select unnest(roles) as role + from + package_member_role + where package_id in (?) + and user_id = ? + union + select default_role as role + from package_group + where id in (?) + ) + ) + order by rank desc; + ` + _, err := r.cp.GetConnection().Query(&result, query, pg.In(packageIds), userId, pg.In(packageIds)) + if err != nil { + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) GetUserSystemRole(userId string) (*entity.SystemRoleEntity, error) { + systemRole := new(entity.SystemRoleEntity) + err := r.cp.GetConnection().Model(systemRole). + Where("user_id = ?", userId). 
+ First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return systemRole, nil +} + +func (r roleRepositoryImpl) SetUserSystemRole(userId string, role string) error { + _, err := r.cp.GetConnection().Model(&entity.SystemRoleEntity{UserId: userId, Role: role}).OnConflict("(user_id) DO UPDATE").Insert() + if err != nil { + return err + } + return nil +} + +func (r roleRepositoryImpl) DeleteUserSystemRole(userId string) error { + _, err := r.cp.GetConnection(). + Model(&entity.SystemRoleEntity{UserId: userId}). + WherePK(). + ForceDelete() + if err != nil { + return err + } + return nil +} + +func (r roleRepositoryImpl) GetAllRoles() ([]entity.RoleEntity, error) { + var result []entity.RoleEntity + err := r.cp.GetConnection().Model(&result). + Order("rank desc"). + Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (r roleRepositoryImpl) CreateRole(roleEntity entity.RoleEntity) error { + ctx := context.Background() + return r.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + shiftRoleRanksUpQuery := `update role set rank = rank + 1 where rank >= ?` + _, err := tx.Exec(shiftRoleRanksUpQuery, roleEntity.Rank) + if err != nil { + return err + } + _, err = tx.Model(&roleEntity).Insert() + if err != nil { + return err + } + + return err + }) +} + +func (r roleRepositoryImpl) UpdateRolePermissions(roleId string, permissions []string) error { + _, err := r.cp.GetConnection().Model(&entity.RoleEntity{}). + Where("id = ?", roleId). + Set("permissions = ?", pg.Array(permissions)). + Update() + if err != nil { + return err + } + return nil +} + +func (r roleRepositoryImpl) DeleteRole(roleId string) error { + ctx := context.Background() + return r.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + shiftRoleRanksDownQuery := ` + update role + set rank = rank - 1 + where rank > (select rank from role where id = ?) 
+ ` + _, err := tx.Exec(shiftRoleRanksDownQuery, roleId) + if err != nil { + return err + } + _, err = tx.Model(&entity.RoleEntity{}). + Where("id = ?", roleId). + Delete() + if err != nil { + return err + } + removeRoleFromMembers := ` + update package_member_role + set roles = array_remove(roles, ?) + ` + _, err = tx.Exec(removeRoleFromMembers, roleId) + if err != nil { + return err + } + return r.deleteMembersWithEmptyRoles(tx) + }) +} + +func (r roleRepositoryImpl) GetRole(roleId string) (*entity.RoleEntity, error) { + result := new(entity.RoleEntity) + err := r.cp.GetConnection().Model(result). + Where("id = ?", roleId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +type Permission struct { + Permission string `pg:"permission"` +} + +func (r roleRepositoryImpl) GetPermissionsForRoles(roles []string) ([]string, error) { + var permissions []Permission + if len(roles) == 0 { + return make([]string, 0), nil + } + query := ` + select distinct unnest(permissions) as permission + from role + where id in(?);` + _, err := r.cp.GetConnection().Query(&permissions, query, pg.In(roles)) + if err != nil { + return nil, err + } + result := make([]string, 0) + for _, p := range permissions { + result = append(result, p.Permission) + } + return result, nil +} + +func (r roleRepositoryImpl) GetUserPermissions(packageId string, userId string) ([]string, error) { + var permissions []Permission + if packageId == "" { + return make([]string, 0), nil + } + packageIds := make([]string, 0) + parts := strings.Split(packageId, ".") + if len(parts) > 1 { + for i, part := range parts { + if i == 0 { + packageIds = append(packageIds, part) + continue + } + if i == (len(parts) - 1) { + break + } + packageIds = append(packageIds, packageIds[i-1]+"."+part) + } + } + packageIds = append([]string{packageId}, packageIds...) 
+ query := ` + select distinct unnest(permissions) as permission + from role + where id in( + select unnest(roles) as role + from + package_member_role + where package_id in (?) + and user_id = ? + union + select default_role as role + from package_group + where id in (?) + );` + _, err := r.cp.GetConnection().Query(&permissions, query, pg.In(packageIds), userId, pg.In(packageIds)) + if err != nil { + return nil, err + } + result := make([]string, 0) + for _, p := range permissions { + result = append(result, p.Permission) + } + return result, nil +} + +func (r roleRepositoryImpl) SetRoleRanks(entities []entity.RoleEntity) error { + ctx := context.Background() + return r.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + for _, ent := range entities { + _, err := tx.Model(&ent). + Column("rank"). + Where("id = ?id"). + Where("read_only = false"). + Update() + if err != nil { + return err + } + } + return nil + }) +} + +func (r roleRepositoryImpl) GetUsersBySystemRole(systemRole string) ([]entity.UserEntity, error) { + var result []entity.UserEntity + err := r.cp.GetConnection().Model(&result). + ColumnExpr("user_data.*"). + Join("inner join system_role sr"). + JoinOn("sr.user_id = user_data.user_id"). + JoinOn("sr.role = ?", systemRole). + Select() + if err != nil { + return nil, err + } + return result, nil +} diff --git a/qubership-apihub-service/repository/TransitionRepository.go b/qubership-apihub-service/repository/TransitionRepository.go new file mode 100644 index 0000000..608ac9b --- /dev/null +++ b/qubership-apihub-service/repository/TransitionRepository.go @@ -0,0 +1,720 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "fmt" + "strings" + "time" + + log "github.com/sirupsen/logrus" + + context2 "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-pg/pg/v10" + "golang.org/x/net/context" +) + +type TransitionRepository interface { + MoveAllData(fromPkg, toPkg string) (int, error) + MovePackage(fromPkg, toPkg string, overwriteHistory bool) (int, error) + MoveGroupingPackage(fromPkg, toPkg string) (int, error) + + TrackTransitionStarted(userCtx context2.SecurityContext, id, trType, fromPkg, toPkg string) error + TrackTransitionProgress(id, progress int) error + TrackTransitionFailed(id, details string) error + TrackTransitionCompleted(id string, affectedObjects int) error + + GetTransitionStatus(id string) (*entity.TransitionActivityEntity, error) + ListCompletedTransitions(completedSerialOffset int, limit int) ([]entity.TransitionActivityEntity, error) + + addPackageTransitionRecord(tx *pg.Tx, oldPackageId string, newPackageId string, overwriteHistory bool) error + GetNewPackageId(oldPackageId string) (string, error) + GetOldPackageIds(newPackageId string) ([]string, error) + ListPackageTransitions() ([]entity.PackageTransitionEntity, error) +} + +func NewTransitionRepository(cp db.ConnectionProvider) TransitionRepository { + return 
&transitionRepositoryImpl{ + cp: cp, + } +} + +type transitionRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (t transitionRepositoryImpl) MoveGroupingPackage(fromPkg, toPkg string) (int, error) { + objAffected := 0 + err := t.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error { + + fromPkgEnt := new(entity.PackageEntity) + err := tx.Model(fromPkgEnt). + Where("id = ?", fromPkg). + First() + if err != nil { + return fmt.Errorf("failed to get from package: %w", err) + } + if !(fromPkgEnt.Kind == entity.KIND_WORKSPACE || fromPkgEnt.Kind == entity.KIND_GROUP) { + return fmt.Errorf("MoveGroupingPackage: not applicable for (from) api kind %s", fromPkgEnt.Kind) + } + // in this case no data is expected, but need to update child packages + + // TODO: implement me! + + // TODO: need to update all child package ids, that's going to be a lot of work + + // TODO: need to handle type change! workspace <-> group + + return fmt.Errorf("MoveGroupingPackage: TODO: not supported yet") + }) + + return objAffected, err +} + +func (t transitionRepositoryImpl) MoveAllData(fromPkg, toPkg string) (int, error) { + objAffected := 0 + err := t.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error { + // Copy version data to satisfy constraints + affected, err := copyVersions(tx, fromPkg, toPkg) + if err != nil { + return err + } + objAffected += affected + + affected, err = moveNonVersionsData(tx, fromPkg, toPkg) + if err != nil { + return err + } + objAffected += affected + + // deleteVersionsData should affect the same rows as copy, so do not append it + err = deleteVersionsData(tx, fromPkg) + if err != nil { + return fmt.Errorf("MoveAllData: failed to delete orig pkg data: %w", err) + } + + return nil + }) + if err != nil { + return 0, err // transaction should be rolled back + } else { + return objAffected, nil + } +} + +func (t transitionRepositoryImpl) MovePackage(fromPkg, toPkg string, overwriteHistory bool) (int, 
error) { + objAffected := 0 + err := t.cp.GetConnection().RunInTransaction(context.Background(), func(tx *pg.Tx) error { + fromPkgEnt := new(entity.PackageEntity) + err := tx.Model(fromPkgEnt). + Where("id = ?", fromPkg). + First() + if err != nil { + return fmt.Errorf("failed to get from package: %w", err) + } + if !(fromPkgEnt.Kind == entity.KIND_PACKAGE || fromPkgEnt.Kind == entity.KIND_DASHBOARD) { + return fmt.Errorf("MovePackage: not applicable for (from) api kind %s", fromPkgEnt.Kind) + } + // in this case no child packages expected, but need to update data + + toParts := strings.Split(toPkg, ".") + newAlias := toParts[len(toParts)-1] + newParent := strings.Join(toParts[:len(toParts)-1], ".") + + fromPkgEnt.Id = toPkg + fromPkgEnt.Alias = newAlias + fromPkgEnt.ParentId = newParent + + _, err = tx.Model(fromPkgEnt).Insert() + if err != nil { + return fmt.Errorf("failed to create new package %s: %w", toPkg, err) + } + + // Copy version data to satisfy constraints + affected, err := copyVersions(tx, fromPkg, toPkg) + if err != nil { + return err + } + objAffected += affected + + affected, err = moveNonVersionsData(tx, fromPkg, toPkg) + if err != nil { + return err + } + objAffected += affected + + updatePS := "update package_service set package_id = ? where package_id=?" + res, err := tx.Exec(updatePS, toPkg, fromPkg) + if err != nil { + return fmt.Errorf("MovePackage: failed to update package_id in package_service from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + err = deleteVersionsData(tx, fromPkg) + if err != nil { + return fmt.Errorf("MoveAllData: failed to delete orig pkg data: %w", err) + } + + deleteFromPkg := "delete from package_group where id = ?" 
+ res, err = tx.Exec(deleteFromPkg, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from package_group: %w", fromPkg, err) + } + objAffected += res.RowsAffected() + + err = t.addPackageTransitionRecord(tx, fromPkg, toPkg, overwriteHistory) + if err != nil { + return fmt.Errorf("MoveAllData: failed to add transition record: %w", err) + } + + return nil + }) + return objAffected, err +} + +// copyVersions copy data related to all versions/revisions +func copyVersions(tx *pg.Tx, fromPkg, toPkg string) (int, error) { + objAffected := 0 + versionsCount, err := tx.Model(&entity.PublishedVersionEntity{}). + Where("package_id = ?", fromPkg). + Count() + if err != nil { + return 0, fmt.Errorf("failed to query from pkg version count: %w", err) + } + if versionsCount == 0 { + return 0, nil + } + + copyVer := "insert into published_version (package_id, version, revision, status, published_at, deleted_at, metadata, " + + "previous_version, previous_version_package_id, labels, created_by, deleted_by) " + + "(select ?, version, revision, status, published_at, deleted_at, metadata, " + + "previous_version, previous_version_package_id, labels, created_by, deleted_by FROM " + + "published_version orig WHERE orig.package_id = ?) on conflict do nothing" + res, err := tx.Exec(copyVer, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to copy versions from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + copyDocsData := "insert into published_data (package_id, checksum, media_type, data) (select ?, checksum, media_type, data from published_data orig where orig.package_id = ?) 
on conflict do nothing"
	res, err = tx.Exec(copyDocsData, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to copy versions docs data from %s to %s: %w", fromPkg, toPkg, err)
	}
	objAffected += res.RowsAffected()

	copyVerDocs := "insert into published_version_revision_content (package_id, version, revision, checksum, index, file_id, path, slug, data_type, name, metadata, title, format, operation_ids, filename) " +
		"(select ?, version, revision, checksum, index, file_id, path, slug, data_type, name, metadata, title, format, operation_ids, filename from published_version_revision_content orig where orig.package_id = ?) on conflict do nothing"
	res, err = tx.Exec(copyVerDocs, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to copy versions docs from %s to %s: %w", fromPkg, toPkg, err)
	}
	objAffected += res.RowsAffected()

	copyRefsMain := "insert into published_version_reference (package_id, version, revision, reference_id, reference_version, reference_revision, parent_reference_id, parent_reference_version, parent_reference_revision, excluded) " +
		"(select ?, version, revision, reference_id, reference_version, reference_revision, parent_reference_id, parent_reference_version, parent_reference_revision, excluded from published_version_reference orig where orig.package_id = ?) on conflict do nothing"
	res, err = tx.Exec(copyRefsMain, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to copy versions refs from %s to %s: %w", fromPkg, toPkg, err)
	}
	objAffected += res.RowsAffected()

	copyPSD := "insert into published_sources (package_id, version, revision, config, metadata, archive_checksum) " +
		"(select ?, version, revision, config, metadata, archive_checksum from published_sources orig where orig.package_id = ?) on conflict do nothing"
	res, err = tx.Exec(copyPSD, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to copy published sources from %s to %s: %w", fromPkg, toPkg, err)
	}
	objAffected += res.RowsAffected()

	// Fix: the two config updates below previously built the SQL by concatenating
	// fromPkg/toPkg directly into the statement text; they now use bind parameters,
	// with jsonb_build_object producing the same single-key jsonb that the inlined
	// '{"key": "value"}' literal did.
	// toPkg is used in the WHERE clause as well, since the rows were already inserted
	// under the new package id.
	updPSD := "update published_sources set config=convert_to((convert_from(config,'UTF8')::jsonb||jsonb_build_object('packageId', ?::text))::varchar, 'UTF8')::bytea where package_id = ?"
	res, err = tx.Exec(updPSD, toPkg, toPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to update published sources packageId for %s: %w", toPkg, err)
	}
	objAffected += res.RowsAffected()

	updPSD2 := "update published_sources set config=convert_to((convert_from(config,'UTF8')::jsonb||jsonb_build_object('previousVersionPackageId', ?::text))::varchar, 'UTF8')::bytea where (convert_from(config,'UTF8')::jsonb)->>'previousVersionPackageId' = ?"
	res, err = tx.Exec(updPSD2, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to update published sources previousVersionPackageId for %s: %w", toPkg, err)
	}
	objAffected += res.RowsAffected()

	copyOps := "insert into operation (package_id, version, revision, operation_id, data_hash, deprecated, kind, title, metadata, type, deprecated_info, deprecated_items, previous_release_versions) " +
		"(select ?, version, revision, operation_id, data_hash, deprecated, kind, title, metadata, type, deprecated_info, deprecated_items, previous_release_versions from operation orig where orig.package_id = ?) on conflict do nothing"
	res, err = tx.Exec(copyOps, toPkg, fromPkg)
	if err != nil {
		return 0, fmt.Errorf("failed to copy operations from %s to %s: %w", fromPkg, toPkg, err)
	}
	objAffected += res.RowsAffected()

	copyOpsGroups := "insert into grouped_operation (group_id, package_id, version, revision, operation_id) (select group_id, ?, version, revision, operation_id from grouped_operation orig where orig.package_id = ?)
on conflict do nothing" + res, err = tx.Exec(copyOpsGroups, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to copy operation groups from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + copyMigratedVersion := "insert into migrated_version (package_id, version, revision, error, build_id, migration_id, build_type, no_changelog) " + + "(select ?, version, revision, error, build_id, migration_id, build_type, no_changelog from migrated_version orig where orig.package_id = ?) on conflict do nothing" + res, err = tx.Exec(copyMigratedVersion, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to copy migrated version from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + copyPVOC := "insert into published_version_open_count (package_id, version, open_count) " + + "(select ?, version, open_count from " + + "published_version_open_count orig where orig.package_id = ?) on conflict do nothing" + res, err = tx.Exec(copyPVOC, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to insert published_version_open_count copy to pkg %s: %w", toPkg, err) + } + objAffected += res.RowsAffected() + + copyPDOC := "insert into published_document_open_count (package_id, version, slug, open_count) " + + "(select ?, version, slug, open_count from " + + "published_document_open_count orig where orig.package_id = ?) on conflict do nothing" + res, err = tx.Exec(copyPDOC, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to insert published_document_open_count copy to pkg %s: %w", toPkg, err) + } + objAffected += res.RowsAffected() + + copyOOC := "insert into operation_open_count (package_id, version, operation_id, open_count) " + + "(select ?, version, operation_id, open_count from operation_open_count orig where orig.package_id = ?) 
on conflict do nothing" + res, err = tx.Exec(copyOOC, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("failed to insert operation_open_count copy to pkg %s: %w", toPkg, err) + } + objAffected += res.RowsAffected() + + return objAffected, nil +} + +// moveNonVersionsData moves non-version data without strong relations +func moveNonVersionsData(tx *pg.Tx, fromPkg, toPkg string) (int, error) { + objAffected := 0 + + updateAT := "update activity_tracking set package_id = ? where package_id=?" + res, err := tx.Exec(updateAT, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in activity tracking from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateApiKeys := "update apihub_api_keys set package_id = ? where package_id=?;" + res, err = tx.Exec(updateApiKeys, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in apihub_api_keys from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateBuild := "update build set package_id = ? where package_id=?;" + res, err = tx.Exec(updateBuild, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in build from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateFavs := "update favorite_packages set package_id = ? where package_id=?" + res, err = tx.Exec(updateFavs, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in favorite_packages from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateShared := "update shared_url_info set package_id = ? where package_id=?" 
+ res, err = tx.Exec(updateShared, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in shared_url_info from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updatePrevVer := "update published_version set previous_version_package_id = ? where previous_version_package_id = ?;" + res, err = tx.Exec(updatePrevVer, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update prev ver package_id in published_version from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateRefs := "update published_version_reference set reference_id = ? where reference_id = ?;" + res, err = tx.Exec(updateRefs, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update ref package_id in published_version_reference from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + // TODO: what about parent reference id? + + updateVersComp := "update version_comparison set package_id = ? where package_id = ?;" + res, err = tx.Exec(updateVersComp, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in version_comparison from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateVersCompPrev := "update version_comparison set previous_package_id = ? where previous_package_id = ?;" + res, err = tx.Exec(updateVersCompPrev, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update prev package_id in version_comparison from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateVersCompIdForRefs := ` + with comp as ( + select + comparison_id, + md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) as new_comparison_id + from version_comparison + where package_id = ? or previous_package_id = ? 
+ ) + update version_comparison b set refs = array_replace(refs, c.comparison_id, c.new_comparison_id::varchar) + from comp c + where c.comparison_id = any(refs);` + res, err = tx.Exec(updateVersCompIdForRefs, toPkg, toPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update comparison_id for refs in version_comparison for package_id update from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateVersCompId := ` + with comp as ( + select + comparison_id, + md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) as new_comparison_id + from version_comparison + where package_id = ? or previous_package_id = ? + ) + update version_comparison b set comparison_id = c.new_comparison_id::varchar + from comp c + where c.comparison_id = b.comparison_id;` + res, err = tx.Exec(updateVersCompId, toPkg, toPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update comparison_id in version_comparison for package_id update from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateOperationComp := "update operation_comparison set package_id = ? where package_id = ?;" + res, err = tx.Exec(updateOperationComp, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in operation_comparison from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateOperationGroups := "update operation_group set package_id = ? 
where package_id = ?;" + res, err = tx.Exec(updateOperationGroups, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in operation_group from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateOperationGroup := "update operation_group " + + "set group_id=MD5(CONCAT_WS('@', package_id, version, revision, api_type, group_name)) " + + "where MD5(CONCAT_WS('@', package_id, version, revision, api_type, group_name))!= operation_group.group_id;" + res, err = tx.Exec(updateOperationGroup) + if err != nil { + return 0, fmt.Errorf("failed to update operation group ids: %w", err) + } + objAffected += res.RowsAffected() + + updateOperationCompPrev := "update operation_comparison set previous_package_id = ? where previous_package_id = ?;" + res, err = tx.Exec(updateOperationCompPrev, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update prev package_id in operation_comparison from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updatePkgSvc := "update package_service set package_id = ? where package_id=?;" + res, err = tx.Exec(updatePkgSvc, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in package_service from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateProject := "update project set package_id = ? where package_id=?;" + res, err = tx.Exec(updateProject, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in project from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateUserRoles := "update package_member_role set package_id = ? 
where package_id=?;" + res, err = tx.Exec(updateUserRoles, toPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_member_role package_id from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + updateMetrics := `update business_metric set data = business_metric.data - ? || jsonb_build_object(?, business_metric.data -> ?) + where data -> ? is not null;` + res, err = tx.Exec(updateMetrics, fromPkg, toPkg, fromPkg, fromPkg) + if err != nil { + return 0, fmt.Errorf("MoveAllData: failed to update package_id in business_metric from %s to %s: %w", fromPkg, toPkg, err) + } + objAffected += res.RowsAffected() + + return objAffected, nil +} + +// deleteVersionsData delete data related to all versions/revisions +func deleteVersionsData(tx *pg.Tx, fromPkg string) error { + query := "delete from published_version where package_id = ?" + _, err := tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_version: %w", fromPkg, err) + } + + query = "delete from published_data where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_data: %w", fromPkg, err) + } + + query = "delete from published_version_revision_content where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_version_revision_content: %w", fromPkg, err) + } + + query = "delete from published_version_reference where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_version_reference: %w", fromPkg, err) + } + + query = "delete from published_sources where package_id = ?" 
+ _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_sources: %w", fromPkg, err) + } + + query = "delete from grouped_operation where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from grouped_operation: %w", fromPkg, err) + } + + query = "delete from operation where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from operation: %w", fromPkg, err) + } + + query = "delete from migrated_version where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from migrated_version: %w", fromPkg, err) + } + + query = "delete from operation_comparison where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from operation_comparison: %w", fromPkg, err) + } + + query = "delete from version_comparison where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from version_comparison: %w", fromPkg, err) + } + + query = "delete from published_version_open_count where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_version_open_count: %w", fromPkg, err) + } + + query = "delete from operation_open_count where package_id = ?" + _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from operation_open_count: %w", fromPkg, err) + } + + query = "delete from published_document_open_count where package_id = ?" 
+ _, err = tx.Exec(query, fromPkg) + if err != nil { + return fmt.Errorf("failed to delete orig(%s) from published_document_open_count: %w", fromPkg, err) + } + + return nil +} + +func (t transitionRepositoryImpl) TrackTransitionStarted(userCtx context2.SecurityContext, id, trType, fromPkg, toPkg string) error { + ent := entity.TransitionActivityEntity{ + Id: id, + TrType: trType, + FromId: fromPkg, + ToId: toPkg, + Status: string(view.StatusRunning), + Details: "", + StartedBy: userCtx.GetUserId(), + StartedAt: time.Now(), + FinishedAt: time.Time{}, + ProgressPercent: 0, + AffectedObjects: 0, + } + + _, err := t.cp.GetConnection().Model(&ent).Insert() + if err != nil { + return fmt.Errorf("failed to insert transition activity entity %+v: %w", ent, err) + } + + return nil +} + +func (t transitionRepositoryImpl) TrackTransitionProgress(id, progress int) error { + ent := entity.TransitionActivityEntity{} + err := t.cp.GetConnection().Model(&ent).Where("id=?", id).First() + if err != nil { + return err + } + ent.ProgressPercent = progress + + _, err = t.cp.GetConnection().Model(&ent).Where("id=?", id).Update() + return err +} + +func (t transitionRepositoryImpl) TrackTransitionFailed(id, details string) error { + ent := entity.TransitionActivityEntity{} + err := t.cp.GetConnection().Model(&ent).Where("id=?", id).First() + if err != nil { + return err + } + ent.Status = string(view.StatusError) + ent.Details = details + ent.FinishedAt = time.Now() + + _, err = t.cp.GetConnection().Model(&ent).Where("id=?", id).Update() + return err +} + +func (t transitionRepositoryImpl) TrackTransitionCompleted(id string, affectedObjects int) error { + updateQuery := `update activity_tracking_transition + set status = ?, affected_objects = ?, finished_at = ?, progress_percent = 100, completed_serial_number = nextval('activity_tracking_transition_completed_seq') + where id=?;` + _, err := t.cp.GetConnection().Exec(updateQuery, string(view.StatusComplete), affectedObjects, time.Now(), 
id) + return err +} + +func (t transitionRepositoryImpl) GetTransitionStatus(id string) (*entity.TransitionActivityEntity, error) { + ent := entity.TransitionActivityEntity{} + err := t.cp.GetConnection().Model(&ent).Where("id=?", id).First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return &ent, nil +} + +func (t transitionRepositoryImpl) ListCompletedTransitions(completedSerialOffset int, limit int) ([]entity.TransitionActivityEntity, error) { + var result []entity.TransitionActivityEntity + err := t.cp.GetConnection().Model(&result). + Where("status = ?", string(view.StatusComplete)). + Order("completed_serial_number ASC"). + Offset(completedSerialOffset). + Limit(limit). + Select() + return result, err +} + +func (t transitionRepositoryImpl) addPackageTransitionRecord(tx *pg.Tx, oldPackageId string, newPackageId string, overwriteHistory bool) error { + if overwriteHistory { + // Delete record from transition history that is reserving `newPackageId` place + res, err := tx.Model(&entity.PackageTransitionEntity{}).Where("old_package_id = ?", newPackageId).Delete() + if err != nil { + return fmt.Errorf("failed to delete historical transition for package %s: %w", newPackageId, err) + } + if res.RowsAffected() >= 0 { + log.Infof("Deleted historical transition for package %s", newPackageId) + } + } + var existingTransitions []entity.PackageTransitionEntity // existing transitions to old id, i.e. 
package move history + err := tx.Model(&existingTransitions).Where("new_package_id = ?", oldPackageId).Select() + if err != nil { + if err != pg.ErrNoRows { + return fmt.Errorf("failed to list existing transitions for package id = %s: %w", oldPackageId, err) + } + } + + newTransition := entity.PackageTransitionEntity{ + OldPackageId: oldPackageId, + NewPackageId: newPackageId, + } + _, err = tx.Model(&newTransition).Insert() + if err != nil { + return fmt.Errorf("failed to insert new transition %+v: %w", newTransition, err) + } + + for _, tr := range existingTransitions { + if tr.OldPackageId == newPackageId { + // it doesn't make sense to redirect pkg to itself, delete the record + res, err := tx.Model(&tr).Where("old_package_id = ?", tr.OldPackageId).Delete() + if err != nil { + return fmt.Errorf("failed to delete self transition %+v: %w", tr, err) + } + if res.RowsAffected() != 1 { + return fmt.Errorf("failed to delete self transition %+v: incorrect affected row count = %d", tr, res.RowsAffected()) + } + } else { + res, err := tx.Model(&tr).Where("old_package_id = ?", tr.OldPackageId).Set("new_package_id = ?", newPackageId).Update() + if err != nil { + return fmt.Errorf("failed to update transition %+v: %w", tr, err) + } + if res.RowsAffected() != 1 { + return fmt.Errorf("failed to update transition %+v: incorrect affected row count = %d", tr, res.RowsAffected()) + } + } + } + + return nil +} + +func (t transitionRepositoryImpl) GetNewPackageId(oldPackageId string) (string, error) { + transition := &entity.PackageTransitionEntity{} + err := t.cp.GetConnection().Model(transition).Where("old_package_id = ?", oldPackageId).Select() + if err != nil { + if err != pg.ErrNoRows { + return "", fmt.Errorf("failed to get transition for package id = %s: %w", oldPackageId, err) + } + } + return transition.NewPackageId, nil +} + +func (t transitionRepositoryImpl) GetOldPackageIds(newPackageId string) ([]string, error) { + var result []string + var existingTransitions 
[]entity.PackageTransitionEntity + err := t.cp.GetConnection().Model(&existingTransitions).Where("new_package_id = ?", newPackageId).Select() + if err != nil { + if err != pg.ErrNoRows { + return nil, fmt.Errorf("failed to list existing transiotions for package id = %s: %w", newPackageId, err) + } + } + for _, tr := range existingTransitions { + result = append(result, tr.OldPackageId) + } + return result, nil +} + +func (t transitionRepositoryImpl) ListPackageTransitions() ([]entity.PackageTransitionEntity, error) { + var result []entity.PackageTransitionEntity + err := t.cp.GetConnection().Model(&result).Select() + return result, err +} diff --git a/qubership-apihub-service/repository/UserRepository.go b/qubership-apihub-service/repository/UserRepository.go new file mode 100644 index 0000000..d5f3fcd --- /dev/null +++ b/qubership-apihub-service/repository/UserRepository.go @@ -0,0 +1,38 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type UserRepository interface { + SaveExternalUser(userEntity *entity.UserEntity, externalIdentity *entity.ExternalIdentityEntity) error + SaveInternalUser(entity *entity.UserEntity) (bool, error) + GetUserById(userId string) (*entity.UserEntity, error) + GetUserByEmail(email string) (*entity.UserEntity, error) + GetUsers(usersListReq view.UsersListReq) ([]entity.UserEntity, error) + GetUsersByIds(userIds []string) ([]entity.UserEntity, error) + GetUsersByEmails(emails []string) ([]entity.UserEntity, error) + GetUserAvatar(userId string) (*entity.UserAvatarEntity, error) + SaveUserAvatar(entity *entity.UserAvatarEntity) error + GetUserExternalIdentity(provider string, externalId string) (*entity.ExternalIdentityEntity, error) + UpdateUserInfo(user *entity.UserEntity) error + UpdateUserPassword(userId string, passwordHash []byte) error + ClearUserPassword(userId string) error + UpdateUserExternalIdentity(provider string, externalId string, internalId string) error + PrivatePackageIdExists(privatePackageId string) (bool, error) +} diff --git a/qubership-apihub-service/repository/UserRepositoryPG.go b/qubership-apihub-service/repository/UserRepositoryPG.go new file mode 100644 index 0000000..87eccca --- /dev/null +++ b/qubership-apihub-service/repository/UserRepositoryPG.go @@ -0,0 +1,245 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package repository + +import ( + "context" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-pg/pg/v10" +) + +func NewUserRepositoryPG(cp db.ConnectionProvider) (UserRepository, error) { + return &userRepositoryImpl{cp: cp}, nil +} + +type userRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (u userRepositoryImpl) SaveUserAvatar(entity *entity.UserAvatarEntity) error { + _, err := u.cp.GetConnection().Model(entity). + OnConflict("(\"user_id\") DO UPDATE"). + Insert() + return err +} + +func (u userRepositoryImpl) GetUserAvatar(userId string) (*entity.UserAvatarEntity, error) { + result := new(entity.UserAvatarEntity) + err := u.cp.GetConnection().Model(result). + Where("user_id = ?", userId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) SaveExternalUser(userEntity *entity.UserEntity, externalIdentity *entity.ExternalIdentityEntity) error { + ctx := context.Background() + err := u.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + _, err := tx.Model(userEntity). + OnConflict("(email) DO UPDATE SET name = EXCLUDED.name, password = EXCLUDED.password"). 
+ Insert() + if err != nil { + return err + } + _, err = tx.Model(externalIdentity). + OnConflict("(provider, external_id) DO UPDATE"). + Insert() + return err + }) + if err != nil { + return err + } + return nil +} + +func (u userRepositoryImpl) SaveInternalUser(entity *entity.UserEntity) (bool, error) { + result, err := u.cp.GetConnection().Model(entity). + OnConflict("(email) DO NOTHING"). + Insert() + if err != nil { + return false, err + } + return result.RowsAffected() > 0, nil +} + +func (u userRepositoryImpl) GetUserById(userId string) (*entity.UserEntity, error) { + result := new(entity.UserEntity) + err := u.cp.GetConnection().Model(result). + Where("user_id = ?", userId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetUsersByIds(userIds []string) ([]entity.UserEntity, error) { + var result []entity.UserEntity + if len(userIds) == 0 { + return nil, nil + } + err := u.cp.GetConnection().Model(&result). + Where("user_id in (?)", pg.In(userIds)). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetUsers(usersListReq view.UsersListReq) ([]entity.UserEntity, error) { + var result []entity.UserEntity + + query := u.cp.GetConnection().Model(&result). + Order("name ASC"). + Offset(usersListReq.Page * usersListReq.Limit). + Limit(usersListReq.Limit) + + if usersListReq.Filter != "" { + filter := "%" + utils.LikeEscaped(usersListReq.Filter) + "%" + query.Where("user_id ilike ?", filter). + WhereOr("name ilike ?", filter). + WhereOr("email ilike ?", filter) + } + + err := query.Select() + if err != nil { + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetAllUsers() ([]entity.UserEntity, error) { + var result []entity.UserEntity + err := u.cp.GetConnection().Model(&result). 
+ Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetUserByEmail(email string) (*entity.UserEntity, error) { + result := new(entity.UserEntity) + err := u.cp.GetConnection().Model(result). + Where("email ilike ?", email). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetUsersByEmails(emails []string) ([]entity.UserEntity, error) { + var result []entity.UserEntity + if len(emails) == 0 { + return nil, nil + } + err := u.cp.GetConnection().Model(&result). + Where("LOWER(email) in (?)", pg.In(emails)). + Select() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) GetUserExternalIdentity(provider string, externalId string) (*entity.ExternalIdentityEntity, error) { + result := new(entity.ExternalIdentityEntity) + err := u.cp.GetConnection().Model(result). + Where("provider = ?", provider). + Where("external_id = ?", externalId). + First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return result, nil +} + +func (u userRepositoryImpl) UpdateUserInfo(user *entity.UserEntity) error { + _, err := u.cp.GetConnection().Model(user). + Where("user_id = ?", user.Id). + Set("name = ?", user.Username). + Set("avatar_url = ?", user.AvatarUrl). + Update() + return err +} + +func (u userRepositoryImpl) UpdateUserPassword(userId string, passwordHash []byte) error { + entity := new(entity.UserEntity) + _, err := u.cp.GetConnection().Model(entity). + Where("user_id = ?", userId). + Set("password = ?", passwordHash). + Update() + return err +} + +func (u userRepositoryImpl) ClearUserPassword(userId string) error { + entity := new(entity.UserEntity) + _, err := u.cp.GetConnection().Model(entity). + Where("user_id = ?", userId). 
+ Set("password = ?", nil). + Update() + return err +} + +func (u userRepositoryImpl) UpdateUserExternalIdentity(provider string, externalId string, internalId string) error { + entity := entity.ExternalIdentityEntity{Provider: provider, ExternalId: externalId, InternalId: internalId} + _, err := u.cp.GetConnection().Model(&entity). + OnConflict("(provider, external_id) DO UPDATE"). + Insert() + return err +} + +func (u userRepositoryImpl) PrivatePackageIdExists(privatePackageId string) (bool, error) { + userEnt := new(entity.UserEntity) + err := u.cp.GetConnection().Model(userEnt). + Where("private_package_id = ?", privatePackageId). + First() + if err != nil { + if err == pg.ErrNoRows { + return false, nil + } + return false, err + } + return userEnt.PrivatePackageId == privatePackageId, nil +} diff --git a/qubership-apihub-service/repository/VersionCleanupRepository.go b/qubership-apihub-service/repository/VersionCleanupRepository.go new file mode 100644 index 0000000..42b02d4 --- /dev/null +++ b/qubership-apihub-service/repository/VersionCleanupRepository.go @@ -0,0 +1,61 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package repository + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/go-pg/pg/v10" +) + +type VersionCleanupRepository interface { + GetVersionCleanupRun(id string) (*entity.VersionCleanupEntity, error) + StoreVersionCleanupRun(entity entity.VersionCleanupEntity) error + UpdateVersionCleanupRun(runId string, status string, details string, deletedItems int) error +} + +func NewVersionCleanupRepository(cp db.ConnectionProvider) VersionCleanupRepository { + return &versionCleanupRepositoryImpl{cp: cp} +} + +type versionCleanupRepositoryImpl struct { + cp db.ConnectionProvider +} + +func (v versionCleanupRepositoryImpl) GetVersionCleanupRun(id string) (*entity.VersionCleanupEntity, error) { + var ent *entity.VersionCleanupEntity + err := v.cp.GetConnection().Model(ent).Where("run_id = ?", id).First() + if err != nil { + if err == pg.ErrNoRows { + return nil, nil + } + return nil, err + } + return ent, nil +} + +func (v versionCleanupRepositoryImpl) StoreVersionCleanupRun(entity entity.VersionCleanupEntity) error { + _, err := v.cp.GetConnection().Model(&entity).Insert() + return err +} + +func (v versionCleanupRepositoryImpl) UpdateVersionCleanupRun(runId string, status string, details string, deletedItems int) error { + _, err := v.cp.GetConnection().Model(&entity.VersionCleanupEntity{}). + Set("status=?", status). + Set("details=?", details). + Set("deleted_items=?", deletedItems). 
+ Where("run_id = ?", runId).Update() + return err +} diff --git a/qubership-apihub-service/resources/migrations/100_transition.down.sql b/qubership-apihub-service/resources/migrations/100_transition.down.sql new file mode 100644 index 0000000..e1b582c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/100_transition.down.sql @@ -0,0 +1 @@ +drop table activity_tracking_transition; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/100_transition.up.sql b/qubership-apihub-service/resources/migrations/100_transition.up.sql new file mode 100644 index 0000000..da96e56 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/100_transition.up.sql @@ -0,0 +1,54 @@ +create table activity_tracking_transition +( + id varchar + constraint activity_tracking_transition_pk + primary key, + tr_type varchar not null, + from_id varchar not null, + to_id varchar not null, + status varchar not null, + details varchar, + started_by varchar not null, + started_at timestamp without time zone not null, + finished_at timestamp without time zone, + progress_percent int, + affected_objects int +); + +create unique index activity_tracking_transition_id_uindex + on activity_tracking_transition (id); + + +create or replace function parent_package_names(character varying) returns character varying[] + language plpgsql +as +$$ +declare + split varchar[] := string_to_array($1, '.')::varchar[]; + parent_ids varchar[]; + parent_names varchar[]; +begin + + if coalesce(array_length(split, 1), 0) <= 1 then + return ARRAY[]::varchar[]; + end if; + + parent_ids = parent_ids || split[1]; + + for i in 2..(array_length(split, 1) - 1) + loop + parent_ids = parent_ids || (parent_ids[i-1] ||'.'|| split[i])::character varying; + end loop; + + execute ' +select array_agg(name) from ( + select name from package_group + join unnest($1) with ordinality t(id, ord) using (id) --sort by parent_ids array + order by t.ord) n' + into parent_names + using 
parent_ids; + + return parent_names; + +end; +$$; diff --git a/qubership-apihub-service/resources/migrations/101_integration_refresh_token.down.sql b/qubership-apihub-service/resources/migrations/101_integration_refresh_token.down.sql new file mode 100644 index 0000000..4193c58 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/101_integration_refresh_token.down.sql @@ -0,0 +1,4 @@ +alter table user_integration drop column refresh_token; +alter table user_integration drop column expires_at; +alter table user_integration drop column redirect_uri; +alter table user_integration drop column failed_refresh_attempts; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/101_integration_refresh_token.up.sql b/qubership-apihub-service/resources/migrations/101_integration_refresh_token.up.sql new file mode 100644 index 0000000..30aab12 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/101_integration_refresh_token.up.sql @@ -0,0 +1,6 @@ +alter table user_integration add column refresh_token varchar; +alter table user_integration add column expires_at timestamp without time zone; +alter table user_integration add column redirect_uri varchar; +alter table user_integration add column failed_refresh_attempts integer default 0; + +update user_integration set is_revoked = true; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.down.sql b/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.down.sql new file mode 100644 index 0000000..6977768 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.down.sql @@ -0,0 +1 @@ +alter table version_comparison drop column builder_version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.up.sql 
b/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.up.sql new file mode 100644 index 0000000..836bd60 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/102_builder_version_in_version_comparison.up.sql @@ -0,0 +1 @@ +alter table version_comparison add column builder_version varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/103_personal_workspaces.down.sql b/qubership-apihub-service/resources/migrations/103_personal_workspaces.down.sql new file mode 100644 index 0000000..e749459 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/103_personal_workspaces.down.sql @@ -0,0 +1 @@ +alter table user_data drop column private_package_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/103_personal_workspaces.up.sql b/qubership-apihub-service/resources/migrations/103_personal_workspaces.up.sql new file mode 100644 index 0000000..98777ed --- /dev/null +++ b/qubership-apihub-service/resources/migrations/103_personal_workspaces.up.sql @@ -0,0 +1,6 @@ +alter table user_data add column private_package_id varchar not null default ''; + +update user_data set private_package_id = user_id; + +ALTER TABLE user_data +ADD CONSTRAINT private_package_id_unique UNIQUE (private_package_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/104_transformed_documents.down.sql b/qubership-apihub-service/resources/migrations/104_transformed_documents.down.sql new file mode 100644 index 0000000..c3daf2d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/104_transformed_documents.down.sql @@ -0,0 +1,2 @@ +alter table transformed_content_data drop constraint if exists "FK_transformed_content_data_operation_group"; +drop table if exists transformed_content_data; diff --git a/qubership-apihub-service/resources/migrations/104_transformed_documents.up.sql 
b/qubership-apihub-service/resources/migrations/104_transformed_documents.up.sql new file mode 100644 index 0000000..a5248ff --- /dev/null +++ b/qubership-apihub-service/resources/migrations/104_transformed_documents.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE if not exists transformed_content_data ( + package_id varchar NOT NULL, + version varchar NOT NULL, + revision integer NOT NULL, + api_type varchar NOT NULL, + group_id varchar NOT NULL, + data bytea, + documents_info jsonb[], + PRIMARY KEY(package_id, version, revision, api_type, group_id) +); + +ALTER TABLE transformed_content_data ADD CONSTRAINT "FK_transformed_content_data_operation_group" + FOREIGN KEY (group_id) REFERENCES operation_group (group_id) ON DELETE Cascade ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/105_models_for_operations.down.sql b/qubership-apihub-service/resources/migrations/105_models_for_operations.down.sql new file mode 100644 index 0000000..bd89d71 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/105_models_for_operations.down.sql @@ -0,0 +1 @@ +alter table operation drop column models; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/105_models_for_operations.up.sql b/qubership-apihub-service/resources/migrations/105_models_for_operations.up.sql new file mode 100644 index 0000000..cb38ead --- /dev/null +++ b/qubership-apihub-service/resources/migrations/105_models_for_operations.up.sql @@ -0,0 +1 @@ +alter table operation add column models jsonb; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/106_operation_custom_tag.down.sql b/qubership-apihub-service/resources/migrations/106_operation_custom_tag.down.sql new file mode 100644 index 0000000..62901f4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/106_operation_custom_tag.down.sql @@ -0,0 +1 @@ +alter table operation drop column if exists custom_tags; \ No newline at 
end of file diff --git a/qubership-apihub-service/resources/migrations/106_operation_custom_tag.up.sql b/qubership-apihub-service/resources/migrations/106_operation_custom_tag.up.sql new file mode 100644 index 0000000..fe2c7ff --- /dev/null +++ b/qubership-apihub-service/resources/migrations/106_operation_custom_tag.up.sql @@ -0,0 +1 @@ +alter table operation add column if not exists custom_tags jsonb; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/107_business_metrics.down.sql b/qubership-apihub-service/resources/migrations/107_business_metrics.down.sql new file mode 100644 index 0000000..3218cad --- /dev/null +++ b/qubership-apihub-service/resources/migrations/107_business_metrics.down.sql @@ -0,0 +1 @@ +drop table business_metric; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/107_business_metrics.up.sql b/qubership-apihub-service/resources/migrations/107_business_metrics.up.sql new file mode 100644 index 0000000..8677bba --- /dev/null +++ b/qubership-apihub-service/resources/migrations/107_business_metrics.up.sql @@ -0,0 +1,8 @@ +create table business_metric ( + year int not null, + month int not null, + day int not null, + metric varchar not null, + data jsonb, + PRIMARY KEY(year, month, day, metric) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/108_build_tracking.down.sql b/qubership-apihub-service/resources/migrations/108_build_tracking.down.sql new file mode 100644 index 0000000..1527d4a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/108_build_tracking.down.sql @@ -0,0 +1,2 @@ +alter table build drop column client_build; +alter table build drop column started_at; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/108_build_tracking.up.sql b/qubership-apihub-service/resources/migrations/108_build_tracking.up.sql new file mode 100644 index 0000000..14e226d --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/108_build_tracking.up.sql @@ -0,0 +1,2 @@ +alter table build add column client_build boolean; +alter table build add column started_at timestamp; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/109_split_jsonPath.down.sql b/qubership-apihub-service/resources/migrations/109_split_jsonPath.down.sql new file mode 100644 index 0000000..4bfce62 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/109_split_jsonPath.down.sql @@ -0,0 +1,6 @@ +update operation +set deprecated_items = merge_json_path(deprecated_items) +where deprecated_items != '{}'; + +drop function if exists split_json_path; +drop function if exists merge_json_path; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/109_split_jsonPath.up.sql b/qubership-apihub-service/resources/migrations/109_split_jsonPath.up.sql new file mode 100644 index 0000000..69f5669 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/109_split_jsonPath.up.sql @@ -0,0 +1,40 @@ +create or replace function split_json_path(jsonb[]) returns jsonb[] +as +$$ +declare + items alias for $1; + ret jsonb[]; +begin + for i in array_lower(items, 1)..array_upper(items, 1) + loop + ret[i] := jsonb_set(items[i], '{jsonPath}', + (array_to_json(string_to_array(trim(both '"' from (items[i] -> 'jsonPath')::text), + '/')))::jsonb, false); + end loop; + return ret; +end; +$$ + language plpgsql + returns null on null input; + +create or replace function merge_json_path(jsonb[]) returns jsonb[] +as +$$ +declare + items alias for $1; + jsonpath text; + ret jsonb[]; +begin + for i in array_lower(items, 1)..array_upper(items, 1) loop + select string_agg(el, '/') into jsonpath from jsonb_array_elements_text(items[i]->'jsonPath') el; + ret[i] := jsonb_set(items[i], '{jsonPath}', to_jsonb(jsonpath), false); + end loop; + return ret; +end; +$$ + language plpgsql + RETURNS NULL ON NULL INPUT; + +update operation 
+set deprecated_items = split_json_path(deprecated_items) +where deprecated_items != '{}'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/10_content_messages.down.sql b/qubership-apihub-service/resources/migrations/10_content_messages.down.sql new file mode 100644 index 0000000..b3937aa --- /dev/null +++ b/qubership-apihub-service/resources/migrations/10_content_messages.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS published_content_messages CASCADE +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/10_content_messages.up.sql b/qubership-apihub-service/resources/migrations/10_content_messages.up.sql new file mode 100644 index 0000000..e787538 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/10_content_messages.up.sql @@ -0,0 +1,11 @@ +DROP TABLE IF EXISTS published_content_messages CASCADE; + +create table published_content_messages +( + checksum varchar not null, + messages jsonb +); + +ALTER TABLE published_content_messages ADD CONSTRAINT "PK_published_content_messages" + PRIMARY KEY (checksum) +; diff --git a/qubership-apihub-service/resources/migrations/110_moved_pkg_comparisons.up.sql b/qubership-apihub-service/resources/migrations/110_moved_pkg_comparisons.up.sql new file mode 100644 index 0000000..2280539 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/110_moved_pkg_comparisons.up.sql @@ -0,0 +1,21 @@ +with comp as ( + select + comparison_id, + md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) as new_comparison_id + from version_comparison + where comparison_id != md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) +) +update version_comparison b set refs = array_replace(refs, c.comparison_id, c.new_comparison_id::varchar) +from comp c +where c.comparison_id = any(refs); + +with comp as ( + select + 
comparison_id, + md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) as new_comparison_id + from version_comparison + where comparison_id != md5(package_id||'@'||version||'@'||revision||'@'||previous_package_id||'@'||previous_version||'@'||previous_revision) +) +update version_comparison b set comparison_id = c.new_comparison_id::varchar +from comp c +where c.comparison_id = b.comparison_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/111_update_build.down.sql b/qubership-apihub-service/resources/migrations/111_update_build.down.sql new file mode 100644 index 0000000..f32592c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/111_update_build.down.sql @@ -0,0 +1,17 @@ +create table builder_task +( + build_id varchar not null + constraint "PK_builder_task" + primary key + constraint "FK_builder_task_build_id" + references build + on update cascade on delete cascade, + builder_id varchar not null, + version integer default 1 not null +); + +insert into builder_task select build_id, builder_id from build where build.builder_id!=''; + +alter table build drop builder_id; + +alter table build drop priority; diff --git a/qubership-apihub-service/resources/migrations/111_update_build.up.sql b/qubership-apihub-service/resources/migrations/111_update_build.up.sql new file mode 100644 index 0000000..6bb7774 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/111_update_build.up.sql @@ -0,0 +1,9 @@ +alter table build add builder_id varchar; + +update build set builder_id = (select builder_id from builder_task where builder_task.build_id=build.build_id); + +drop table builder_task; + +alter table build add priority int not null default 0; + + diff --git a/qubership-apihub-service/resources/migrations/112_migration_validation.down.sql b/qubership-apihub-service/resources/migrations/112_migration_validation.down.sql new file mode 100644 index 
0000000..4b78713 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/112_migration_validation.down.sql @@ -0,0 +1,2 @@ +drop table migrated_version_changes; +alter table migration_run drop column skip_validation; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/112_migration_validation.up.sql b/qubership-apihub-service/resources/migrations/112_migration_validation.up.sql new file mode 100644 index 0000000..a8f75aa --- /dev/null +++ b/qubership-apihub-service/resources/migrations/112_migration_validation.up.sql @@ -0,0 +1,10 @@ +create table migrated_version_changes ( + package_id varchar not null, + version varchar not null, + revision varchar not null, + build_id varchar not null, + migration_id varchar not null, + changes jsonb +); + +alter table migration_run add column skip_validation boolean; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/113_default_last_active.down.sql b/qubership-apihub-service/resources/migrations/113_default_last_active.down.sql new file mode 100644 index 0000000..6abf5d8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/113_default_last_active.down.sql @@ -0,0 +1,2 @@ +alter table build alter column last_active drop default; +alter table build alter column created_at drop default; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/113_default_last_active.up.sql b/qubership-apihub-service/resources/migrations/113_default_last_active.up.sql new file mode 100644 index 0000000..eea7df4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/113_default_last_active.up.sql @@ -0,0 +1,2 @@ +alter table build alter column last_active set default now(); +alter table build alter column created_at set default now(); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/114_remove_old_checksum.down.sql 
b/qubership-apihub-service/resources/migrations/114_remove_old_checksum.down.sql new file mode 100644 index 0000000..fe4ef1d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/114_remove_old_checksum.down.sql @@ -0,0 +1,14 @@ +alter table published_sources + add checksum varchar; + +create table published_sources_data +( + package_id varchar not null + constraint published_sources_data_package_group_id_fk + references package_group + on update cascade on delete cascade, + checksum varchar not null, + data bytea, + constraint "PK_published_sources_data" + primary key (checksum, package_id) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/114_remove_old_checksum.up.sql b/qubership-apihub-service/resources/migrations/114_remove_old_checksum.up.sql new file mode 100644 index 0000000..5d90956 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/114_remove_old_checksum.up.sql @@ -0,0 +1,4 @@ +alter table published_sources + drop column checksum; + +drop table published_sources_data; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/115_add_build_status_index.down.sql b/qubership-apihub-service/resources/migrations/115_add_build_status_index.down.sql new file mode 100644 index 0000000..4b6ead3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/115_add_build_status_index.down.sql @@ -0,0 +1,3 @@ +drop index build_status_index; + +drop index build_depends_index; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/115_add_build_status_index.up.sql b/qubership-apihub-service/resources/migrations/115_add_build_status_index.up.sql new file mode 100644 index 0000000..1053817 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/115_add_build_status_index.up.sql @@ -0,0 +1,3 @@ +create index build_status_index on build (status); + +create index build_depends_index on build_depends (depend_id); \ No newline at end 
of file diff --git a/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.down.sql b/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.down.sql new file mode 100644 index 0000000..bcc1e37 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.down.sql @@ -0,0 +1,14 @@ +alter table version_comparison add column operation_types_arr jsonb[]; +update version_comparison set operation_types_arr = array(select jsonb_array_elements(operation_types))::jsonb[]; +alter table version_comparison drop column operation_types; +alter table version_comparison rename column operation_types_arr to operation_types; + +alter table transformed_content_data add column documents_info_arr jsonb[]; +update transformed_content_data set documents_info_arr = array(select jsonb_array_elements(documents_info))::jsonb[]; +alter table transformed_content_data drop column documents_info; +alter table transformed_content_data rename column documents_info_arr to documents_info; + +alter table operation add column deprecated_items_arr jsonb[]; +update operation set deprecated_items_arr = array(select jsonb_array_elements(deprecated_items))::jsonb[]; +alter table operation drop column deprecated_items; +alter table operation rename column deprecated_items_arr to deprecated_items; diff --git a/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.up.sql b/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.up.sql new file mode 100644 index 0000000..272f81a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/116_jsonb_arr_to_jsonb.up.sql @@ -0,0 +1,3 @@ +alter table version_comparison alter column operation_types type jsonb using to_jsonb(operation_types); +alter table transformed_content_data alter column documents_info type jsonb using to_jsonb(documents_info); +alter table operation alter column deprecated_items type jsonb using to_jsonb(deprecated_items); \ No newline at end of file diff 
--git a/qubership-apihub-service/resources/migrations/117_package_transition.down.sql b/qubership-apihub-service/resources/migrations/117_package_transition.down.sql new file mode 100644 index 0000000..e32b6f3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/117_package_transition.down.sql @@ -0,0 +1 @@ +drop table package_transition; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/117_package_transition.up.sql b/qubership-apihub-service/resources/migrations/117_package_transition.up.sql new file mode 100644 index 0000000..011b4cf --- /dev/null +++ b/qubership-apihub-service/resources/migrations/117_package_transition.up.sql @@ -0,0 +1,12 @@ +create table package_transition +( + old_package_id varchar not null, + new_package_id varchar not null +); + +create index package_transition_old_package_id_index + on package_transition (old_package_id); + +update operation_group +set group_id=MD5(CONCAT_WS('@', package_id, version, revision, api_type, group_name))::varchar +where MD5(CONCAT_WS('@', package_id, version, revision, api_type, group_name))!= operation_group.group_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.down.sql b/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.down.sql new file mode 100644 index 0000000..502af9b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.down.sql @@ -0,0 +1,2 @@ +drop sequence activity_tracking_transition_completed_seq; +alter table activity_tracking_transition drop column completed_serial_number; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.up.sql b/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.up.sql new file mode 100644 index 0000000..e2f3b43 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/118_transition_activity_sequence.up.sql @@ -0,0 +1,10 @@ +alter table activity_tracking_transition add column completed_serial_number integer; +create sequence if not exists activity_tracking_transition_completed_seq as int minvalue 0 owned by activity_tracking_transition.completed_serial_number; +with completed_att as ( + select id from activity_tracking_transition + where status = 'complete' + order by finished_at +) +update activity_tracking_transition att set completed_serial_number = nextval('activity_tracking_transition_completed_seq') +from completed_att c +where c.id = att.id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/119_workspace_services.down.sql b/qubership-apihub-service/resources/migrations/119_workspace_services.down.sql new file mode 100644 index 0000000..f736d64 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/119_workspace_services.down.sql @@ -0,0 +1,27 @@ +--unassign duplicate service_names +with l as ( + select service_name, min(id) as id from package_group + where service_name is not null and service_name != '' + group by service_name having count(*) > 1 +) +update package_group pg set service_name = null +from l +where l.service_name = pg.service_name +and l.id != pg.id; + +delete from package_service ps +using package_group pg +where ps.package_id = pg.id +and pg.service_name is null; + +alter table package_service drop constraint "FK_package_group_workspace"; + +alter table package_service drop constraint "PK_package_service"; +alter table package_service add constraint "PK_package_service" + primary key (package_id, service_name); + +alter table package_service drop constraint package_service_workspace_id_service_name_key; + +alter table package_service add UNIQUE(service_name); + +alter table package_service drop column workspace_id; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/119_workspace_services.up.sql b/qubership-apihub-service/resources/migrations/119_workspace_services.up.sql new file mode 100644 index 0000000..c960bca --- /dev/null +++ b/qubership-apihub-service/resources/migrations/119_workspace_services.up.sql @@ -0,0 +1,14 @@ +alter table package_service add column workspace_id varchar; +update package_service set workspace_id = split_part(package_id, '.', 1); +alter table package_service alter column workspace_id set not null; + +alter table package_service drop constraint package_service_service_name_key; + +alter table package_service add UNIQUE(workspace_id, service_name); + +alter table package_service drop constraint "PK_package_service"; +alter table package_service add constraint "PK_package_service" + primary key (workspace_id, package_id, service_name); + +alter table package_service add constraint "FK_package_group_workspace" + foreign key (workspace_id) references package_group (id) on delete Cascade on update Cascade; diff --git a/qubership-apihub-service/resources/migrations/11_content_labels_metadata.down.sql b/qubership-apihub-service/resources/migrations/11_content_labels_metadata.down.sql new file mode 100644 index 0000000..fea8c31 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/11_content_labels_metadata.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE published_version_revision_content + DROP COLUMN metadata; + +ALTER TABLE branch_draft_content + DROP COLUMN labels; + \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/11_content_labels_metadata.up.sql b/qubership-apihub-service/resources/migrations/11_content_labels_metadata.up.sql new file mode 100644 index 0000000..24d1965 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/11_content_labels_metadata.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE published_version_revision_content + ADD COLUMN metadata jsonb; + +ALTER TABLE branch_draft_content + ADD COLUMN 
labels varchar[]; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/120_release_version_pattern.down.sql b/qubership-apihub-service/resources/migrations/120_release_version_pattern.down.sql new file mode 100644 index 0000000..1fe72aa --- /dev/null +++ b/qubership-apihub-service/resources/migrations/120_release_version_pattern.down.sql @@ -0,0 +1,3 @@ +update package_group +set release_version_pattern = null +where release_version_pattern = '^[0-9]{4}[.]{1}[1-4]{1}$' and (kind = 'package' or kind = 'dashboard'); diff --git a/qubership-apihub-service/resources/migrations/120_release_version_pattern.up.sql b/qubership-apihub-service/resources/migrations/120_release_version_pattern.up.sql new file mode 100644 index 0000000..3351206 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/120_release_version_pattern.up.sql @@ -0,0 +1,3 @@ +update package_group +set release_version_pattern = '^[0-9]{4}[.]{1}[1-4]{1}$' +where (kind = 'package' or kind = 'dashboard') and (release_version_pattern = '' or release_version_pattern is null); diff --git a/qubership-apihub-service/resources/migrations/121_remove_version_permissions.up.sql b/qubership-apihub-service/resources/migrations/121_remove_version_permissions.up.sql new file mode 100644 index 0000000..913ae67 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/121_remove_version_permissions.up.sql @@ -0,0 +1,2 @@ +update role set permissions = array_remove(permissions, 'manage_release_candidate_version'); +update role set permissions = array_remove(permissions, 'manage_deprecated_version'); diff --git a/qubership-apihub-service/resources/migrations/122_endpoint_calls.down.sql b/qubership-apihub-service/resources/migrations/122_endpoint_calls.down.sql new file mode 100644 index 0000000..24dffc4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/122_endpoint_calls.down.sql @@ -0,0 +1 @@ +drop table endpoint_calls; \ No newline at end of file diff 
--git a/qubership-apihub-service/resources/migrations/122_endpoint_calls.up.sql b/qubership-apihub-service/resources/migrations/122_endpoint_calls.up.sql new file mode 100644 index 0000000..c21d35e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/122_endpoint_calls.up.sql @@ -0,0 +1,7 @@ +create table endpoint_calls( + path varchar not null, + hash varchar not null, + options jsonb, + count integer, + PRIMARY KEY(path, hash) +); diff --git a/qubership-apihub-service/resources/migrations/123_versions_cleanup.down.sql b/qubership-apihub-service/resources/migrations/123_versions_cleanup.down.sql new file mode 100644 index 0000000..e7e12ea --- /dev/null +++ b/qubership-apihub-service/resources/migrations/123_versions_cleanup.down.sql @@ -0,0 +1 @@ +drop table if exists versions_cleanup_run; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/123_versions_cleanup.up.sql b/qubership-apihub-service/resources/migrations/123_versions_cleanup.up.sql new file mode 100644 index 0000000..33c1fc1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/123_versions_cleanup.up.sql @@ -0,0 +1,16 @@ +create table if not exists versions_cleanup_run ( + run_id uuid, + started_at timestamp without time zone not null default now(), + package_id varchar not null, + delete_before timestamp without time zone not null, + status varchar not null, + details varchar, + deleted_items integer +); + +alter table versions_cleanup_run add constraint PK_versions_cleanup_run primary key (run_id); + +alter table versions_cleanup_run + add constraint versions_cleanup_run_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/124_api_key_created_for.down.sql b/qubership-apihub-service/resources/migrations/124_api_key_created_for.down.sql new file mode 100644 index 0000000..06912dc --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/124_api_key_created_for.down.sql @@ -0,0 +1 @@ +ALTER TABLE apihub_api_keys DROP COLUMN created_for; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/124_api_key_created_for.up.sql b/qubership-apihub-service/resources/migrations/124_api_key_created_for.up.sql new file mode 100644 index 0000000..7ce422c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/124_api_key_created_for.up.sql @@ -0,0 +1 @@ +ALTER TABLE apihub_api_keys ADD COLUMN created_for VARCHAR; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/125_build_metadata.down.sql b/qubership-apihub-service/resources/migrations/125_build_metadata.down.sql new file mode 100644 index 0000000..5983478 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/125_build_metadata.down.sql @@ -0,0 +1 @@ +alter table build drop column metadata; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/125_build_metadata.up.sql b/qubership-apihub-service/resources/migrations/125_build_metadata.up.sql new file mode 100644 index 0000000..81048ee --- /dev/null +++ b/qubership-apihub-service/resources/migrations/125_build_metadata.up.sql @@ -0,0 +1 @@ +alter table build add column metadata jsonb; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.down.sql b/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.down.sql new file mode 100644 index 0000000..2c8a1b7 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.down.sql @@ -0,0 +1,9 @@ +alter table operation_group drop column template; +alter table operation_group drop column template_filename; + +alter table transformed_content_data drop constraint if exists transformed_content_data_pkey; +alter table transformed_content_data add constraint 
transformed_content_data_pkey +primary key(package_id, version, revision, api_type, group_id); + +alter table transformed_content_data drop column format; +alter table transformed_content_data drop column build_type; diff --git a/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.up.sql b/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.up.sql new file mode 100644 index 0000000..5aaa705 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/126_transformed_documents_rework.up.sql @@ -0,0 +1,9 @@ +alter table transformed_content_data add column build_type varchar default 'documentGroup'; +alter table transformed_content_data add column format varchar default 'json'; + +alter table transformed_content_data drop constraint if exists transformed_content_data_pkey; +alter table transformed_content_data add constraint transformed_content_data_pkey +primary key(package_id, version, revision, api_type, group_id, build_type, format); + +alter table operation_group add column template bytea; +alter table operation_group add column template_filename varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/127_template_checksum.down.sql b/qubership-apihub-service/resources/migrations/127_template_checksum.down.sql new file mode 100644 index 0000000..4d0dcba --- /dev/null +++ b/qubership-apihub-service/resources/migrations/127_template_checksum.down.sql @@ -0,0 +1,8 @@ +alter table operation_group add column template bytea; + +update operation_group og set template = (select template from operation_group_template ogt where checksum = og.template_checksum) +where template_checksum != '' and template_checksum is not null; + +alter table operation_group drop column template_checksum; + +drop table operation_group_template; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/127_template_checksum.up.sql 
b/qubership-apihub-service/resources/migrations/127_template_checksum.up.sql new file mode 100644 index 0000000..9defd7e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/127_template_checksum.up.sql @@ -0,0 +1,11 @@ +create table operation_group_template ( + checksum varchar, + template bytea, + PRIMARY KEY (checksum) +); + +insert into operation_group_template +select distinct md5(template), template from operation_group where template is not null; + +alter table operation_group alter column template type varchar using md5(template); +alter table operation_group rename column template to template_checksum; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.down.sql b/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.down.sql new file mode 100644 index 0000000..9044d33 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.down.sql @@ -0,0 +1 @@ +alter table grouped_operation add column deleted boolean default false; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.up.sql b/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.up.sql new file mode 100644 index 0000000..1ca1031 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/128_remove_ghost_operations.up.sql @@ -0,0 +1,2 @@ +delete from grouped_operation where deleted = true; +alter table grouped_operation drop column deleted; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.down.sql b/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.down.sql new file mode 100644 index 0000000..4ebd9f8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.down.sql @@ -0,0 +1,6 @@ +delete from business_metric where user_id != 'unknown'; + +alter table 
business_metric drop column user_id; + +alter table business_metric add constraint business_metric_pkey +primary key(year, month, day, metric); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.up.sql b/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.up.sql new file mode 100644 index 0000000..9b3712d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/129_business_metrics_user_id.up.sql @@ -0,0 +1,5 @@ +alter table business_metric add column user_id varchar default 'unknown'; + +alter table business_metric drop constraint if exists business_metric_pkey; +alter table business_metric add constraint business_metric_pkey +primary key(year, month, day, metric, user_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/12_shared_content.down.sql b/qubership-apihub-service/resources/migrations/12_shared_content.down.sql new file mode 100644 index 0000000..e661db0 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/12_shared_content.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS shared_url_info CASCADE +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/12_shared_content.up.sql b/qubership-apihub-service/resources/migrations/12_shared_content.up.sql new file mode 100644 index 0000000..d0073e4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/12_shared_content.up.sql @@ -0,0 +1,18 @@ +DROP TABLE IF EXISTS shared_url_info CASCADE +; +CREATE TABLE shared_url_info +( + project_id varchar NOT NULL, + version varchar NOT NULL, + file_id varchar NOT NULL, + shared_id varchar NOT NULL +) +; +ALTER TABLE shared_url_info + ADD CONSTRAINT "PK_shared_url_info" + PRIMARY KEY (shared_id) +; +ALTER TABLE shared_url_info + ADD CONSTRAINT "shared_url_info__file_info" + UNIQUE (project_id, version, file_id) +; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/130_operation_group_history.down.sql b/qubership-apihub-service/resources/migrations/130_operation_group_history.down.sql new file mode 100644 index 0000000..07164f4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/130_operation_group_history.down.sql @@ -0,0 +1 @@ +drop table operation_group_history; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/130_operation_group_history.up.sql b/qubership-apihub-service/resources/migrations/130_operation_group_history.up.sql new file mode 100644 index 0000000..0e8884a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/130_operation_group_history.up.sql @@ -0,0 +1,25 @@ +create table operation_group_history( + group_id varchar, + action varchar, + data jsonb, + user_id varchar, + date timestamp without time zone, + automatic bool +); + +insert into operation_group_history +select +group_id, 'create' as action, +jsonb_build_object( + 'PackageId', package_id, + 'Version', version, + 'Revision', revision, + 'ApiType', api_type, + 'GroupName', group_name, + 'GroupId', group_id, + 'Description', coalesce(description, ''), + 'Autogenerated', coalesce(autogenerated, false), + 'TemplateChecksum', coalesce(template_checksum, ''), + 'TemplateFilename', coalesce(template_filename, '')) as data, +'unknown' as user_id, now() as date, true as automatic +from operation_group where autogenerated = false; diff --git a/qubership-apihub-service/resources/migrations/131_gitlab_webhook.down.sql b/qubership-apihub-service/resources/migrations/131_gitlab_webhook.down.sql new file mode 100644 index 0000000..6c17d7d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/131_gitlab_webhook.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE project DROP COLUMN secret_token; +ALTER TABLE project DROP COLUMN secret_token_user_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/131_gitlab_webhook.up.sql 
b/qubership-apihub-service/resources/migrations/131_gitlab_webhook.up.sql new file mode 100644 index 0000000..3f089a8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/131_gitlab_webhook.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE project ADD COLUMN secret_token VARCHAR; +ALTER TABLE project ADD COLUMN secret_token_user_id VARCHAR; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.up.sql b/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.up.sql new file mode 100644 index 0000000..01b5584 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.up.sql @@ -0,0 +1,7 @@ +alter table build_cleanup_run +add column if not exists build_result integer default 0, +add column if not exists build_src integer default 0, +add column if not exists operation_data integer default 0, +add column if not exists ts_operation_data integer default 0, +add column if not exists ts_rest_operation_data integer default 0, +add column if not exists ts_gql_operation_data integer default 0; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.down.sql b/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.down.sql new file mode 100644 index 0000000..5cd9485 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/132_operation_data_cleanup.down.sql @@ -0,0 +1,7 @@ +alter table build_cleanup_run +drop column if exists build_result, +drop column if exists build_src, +drop column if exists operation_data, +drop column if exists ts_operation_data, +drop column if exists ts_rest_operation_data, +drop column if exists ts_gql_operation_data; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.down.sql b/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.down.sql new file mode 100644 index 0000000..b55091d
--- /dev/null +++ b/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.down.sql @@ -0,0 +1,11 @@ +alter table drafted_branches drop column commit_id; + +alter table branch_draft_content +drop column blob_id, +drop column conflicted_blob_id; + +--next migration + +-- alter table branch_draft_content +-- add column commit_id varchar, +-- add column conflicted_commit_id varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.up.sql b/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.up.sql new file mode 100644 index 0000000..fecadec --- /dev/null +++ b/qubership-apihub-service/resources/migrations/133_editor_draft_redesign.up.sql @@ -0,0 +1,17 @@ +alter table drafted_branches +add column commit_id varchar; + +alter table branch_draft_content +add column blob_id varchar, +add column conflicted_blob_id varchar; + +--set blob_it=commit_id for some draft files because it will be impossible to calculate blob_id for them via soft migration +update branch_draft_content set blob_id = commit_id +where coalesce(commit_id, '') != '' +and data is null; + +--next migration + +-- alter table branch_draft_content +-- drop column commit_id, +-- drop column conflicted_commit_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/134_migration_validation_overview.down.sql b/qubership-apihub-service/resources/migrations/134_migration_validation_overview.down.sql new file mode 100644 index 0000000..9a1d56f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/134_migration_validation_overview.down.sql @@ -0,0 +1,2 @@ +alter table migrated_version_changes drop column unique_changes; +drop table migration_changes; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/134_migration_validation_overview.up.sql b/qubership-apihub-service/resources/migrations/134_migration_validation_overview.up.sql new file 
mode 100644 index 0000000..089b5be --- /dev/null +++ b/qubership-apihub-service/resources/migrations/134_migration_validation_overview.up.sql @@ -0,0 +1,5 @@ +alter table migrated_version_changes add column unique_changes varchar[]; +create table migration_changes( + migration_id varchar, + changes jsonb, + PRIMARY KEY(migration_id)); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/135_api_key_id.down.sql b/qubership-apihub-service/resources/migrations/135_api_key_id.down.sql new file mode 100644 index 0000000..a39d905 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/135_api_key_id.down.sql @@ -0,0 +1,2 @@ +update apihub_api_keys +set id = substring(id from 9); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/135_api_key_id.up.sql b/qubership-apihub-service/resources/migrations/135_api_key_id.up.sql new file mode 100644 index 0000000..4fdd264 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/135_api_key_id.up.sql @@ -0,0 +1,2 @@ +update apihub_api_keys +set id = 'api-key_' || id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.down.sql b/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.down.sql new file mode 100644 index 0000000..f7818bb --- /dev/null +++ b/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.down.sql @@ -0,0 +1,16 @@ +create or replace function get_latest_revision(package_id varchar, version varchar) + returns integer language plpgsql +as ' + declare + latest_revision integer; + begin + execute '' + select max(revision) + from published_version + where package_id = $1 and version = $2 and deleted_at is null;'' + into latest_revision + using package_id,version; + if latest_revision is null then return 0; + end if; + return latest_revision; + end;'; diff --git 
a/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.up.sql b/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.up.sql new file mode 100644 index 0000000..0684aef --- /dev/null +++ b/qubership-apihub-service/resources/migrations/136_latest_revision_func_fix.up.sql @@ -0,0 +1,16 @@ +create or replace function get_latest_revision(package_id varchar, version varchar) + returns integer language plpgsql +as ' + declare + latest_revision integer; + begin + execute '' + select max(revision) + from published_version + where package_id = $1 and version = $2;'' + into latest_revision + using package_id,version; + if latest_revision is null then return 0; + end if; + return latest_revision; + end;'; diff --git a/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.down.sql b/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.down.sql new file mode 100644 index 0000000..13410c9 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.down.sql @@ -0,0 +1 @@ +drop index operation_comparison_comparison_id_index; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.up.sql b/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.up.sql new file mode 100644 index 0000000..a699ed4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/137_index_for_op_comparison.up.sql @@ -0,0 +1,2 @@ +create index operation_comparison_comparison_id_index + on operation_comparison (comparison_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/138_agent_version.down.sql b/qubership-apihub-service/resources/migrations/138_agent_version.down.sql new file mode 100644 index 0000000..244bfe5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/138_agent_version.down.sql @@ -0,0 +1 @@ +alter table agent drop column if exists 
agent_version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/138_agent_version.up.sql b/qubership-apihub-service/resources/migrations/138_agent_version.up.sql new file mode 100644 index 0000000..26892f1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/138_agent_version.up.sql @@ -0,0 +1 @@ +alter table agent add column if not exists agent_version varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.down.sql b/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.down.sql new file mode 100644 index 0000000..d06aa01 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.down.sql @@ -0,0 +1,44 @@ +CREATE TABLE ts_published_data_path_split +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + search_vector tsvector, + unique(package_id, checksum) +); + +CREATE TABLE ts_published_data_custom_split +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + search_vector tsvector, + unique(package_id, checksum) +); + +CREATE TABLE ts_published_data_errors +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + error varchar, + unique(package_id, checksum) +); + +alter table ts_published_data_errors + add constraint ts_published_data_errors_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table ts_published_data_custom_split + add constraint ts_published_data_custom_split_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + + +CREATE INDEX ts_published_data_path_split_idx +ON ts_published_data_path_split +USING gin(search_vector) +with (fastupdate = true); + +CREATE INDEX ts_published_data_custom_split_idx +ON ts_published_data_custom_split +USING gin(search_vector) +with (fastupdate = true); diff --git 
a/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.up.sql b/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.up.sql new file mode 100644 index 0000000..40c5ece --- /dev/null +++ b/qubership-apihub-service/resources/migrations/139_search_tables_cleanup.up.sql @@ -0,0 +1,5 @@ +drop table ts_published_data_path_split; +drop table ts_published_data_custom_split; +drop table ts_published_data_errors; +DROP INDEX IF EXISTS ts_published_data_path_split_idx; +DROP INDEX IF EXISTS ts_published_data_custom_split_idx; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/13_branch_editors.down.sql b/qubership-apihub-service/resources/migrations/13_branch_editors.down.sql new file mode 100644 index 0000000..6311b8c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/13_branch_editors.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS branch_editors; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/13_branch_editors.up.sql b/qubership-apihub-service/resources/migrations/13_branch_editors.up.sql new file mode 100644 index 0000000..5b88aa6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/13_branch_editors.up.sql @@ -0,0 +1,7 @@ +CREATE TABLE branch_editors +( + project_id varchar NOT NULL, + branch_name varchar NOT NULL, + editors varchar[], + PRIMARY KEY (project_id, branch_name) +); diff --git a/qubership-apihub-service/resources/migrations/140_audience.down.sql b/qubership-apihub-service/resources/migrations/140_audience.down.sql new file mode 100644 index 0000000..d499af2 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/140_audience.down.sql @@ -0,0 +1 @@ +alter table operation drop column api_audience; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/140_audience.up.sql b/qubership-apihub-service/resources/migrations/140_audience.up.sql new file mode 100644 index 0000000..94d57b2 
--- /dev/null +++ b/qubership-apihub-service/resources/migrations/140_audience.up.sql @@ -0,0 +1 @@ +alter table operation add column api_audience varchar default 'external'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/141_group_publish.down.sql b/qubership-apihub-service/resources/migrations/141_group_publish.down.sql new file mode 100644 index 0000000..fe0d83e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/141_group_publish.down.sql @@ -0,0 +1 @@ +drop table operation_group_publication; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/141_group_publish.up.sql b/qubership-apihub-service/resources/migrations/141_group_publish.up.sql new file mode 100644 index 0000000..e1c56b5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/141_group_publish.up.sql @@ -0,0 +1,6 @@ +create table operation_group_publication( + publish_id varchar, + status varchar, + details varchar, + PRIMARY KEY(publish_id) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/142_metrics_fix.up.sql b/qubership-apihub-service/resources/migrations/142_metrics_fix.up.sql new file mode 100644 index 0000000..add4fc2 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/142_metrics_fix.up.sql @@ -0,0 +1,7 @@ +with migrations as ( + select distinct started_at::date, updated_at::date from migration_run + where started_at is not null and updated_at is not null +) +delete from business_metric using migrations m +where to_date(year || '-' || month || '-' || day, 'YYYY-MM-DD') between m.started_at and m.updated_at +and metric = 'release_versions_published'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/143_clean_deleted_previous_version_ref.up.sql b/qubership-apihub-service/resources/migrations/143_clean_deleted_previous_version_ref.up.sql new file mode 100644 index 0000000..72f4349 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/143_clean_deleted_previous_version_ref.up.sql @@ -0,0 +1,31 @@ +update published_version pv +set previous_version = null, previous_version_package_id = null +from ( + with deleted_versions as ( + select d.deleted_version as "version", d.dv_package_id as package_id from ( + with dv as (select distinct pv2."version" as deleted_version, pv2.package_id as dv_package_id from published_version pv2 where pv2.deleted_at is not null), + ndv as (select distinct pv2."version" as not_deleted_version, pv2.package_id as ndv_package_id from published_version pv2 where pv2.deleted_at is null) + select * from dv + left join ndv + on dv.deleted_version = ndv.not_deleted_version and dv.dv_package_id = ndv.ndv_package_id + ) as d + where d.not_deleted_version is null and d.ndv_package_id is null) + select pv.package_id, pv."version", revision from published_version pv + join deleted_versions on + pv.previous_version = deleted_versions."version" + and (pv.previous_version_package_id = deleted_versions.package_id + or + (pv.package_id = deleted_versions.package_id and (pv.previous_version_package_id = '' or pv.previous_version_package_id is null))) + where pv.deleted_at is null +) d +where pv.package_id = d.package_id and pv."version" = d."version" and pv.revision = d.revision; + +update published_version pv +set previous_version = null, previous_version_package_id = null +from ( + select package_id, "version", revision from published_version pv + where pv."version" = pv.previous_version + and deleted_at is null + and (pv.package_id = pv.previous_version_package_id or pv.previous_version_package_id is null or pv.previous_version_package_id = '') +) eqv +where pv.package_id = eqv.package_id and pv."version" = eqv."version" and pv.revision = eqv.revision; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.down.sql 
b/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.down.sql new file mode 100644 index 0000000..07588bb --- /dev/null +++ b/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.down.sql @@ -0,0 +1,2 @@ +alter table operation drop column if exists deprecated_info; +alter table operation add column if not exists deprecated_info jsonb; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.up.sql b/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.up.sql new file mode 100644 index 0000000..993dd58 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/144_deprecated_info_to_string.up.sql @@ -0,0 +1,2 @@ +alter table operation drop column if exists deprecated_info; +alter table operation add column if not exists deprecated_info varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/145_csv_publish.down.sql b/qubership-apihub-service/resources/migrations/145_csv_publish.down.sql new file mode 100644 index 0000000..9593e82 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/145_csv_publish.down.sql @@ -0,0 +1 @@ +drop table csv_dashboard_publication; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/145_csv_publish.up.sql b/qubership-apihub-service/resources/migrations/145_csv_publish.up.sql new file mode 100644 index 0000000..e42162e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/145_csv_publish.up.sql @@ -0,0 +1,7 @@ +create table csv_dashboard_publication( + publish_id varchar, + status varchar, + message varchar, + csv_report bytea, + PRIMARY KEY(publish_id) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/14_fix_published_path.down.sql b/qubership-apihub-service/resources/migrations/14_fix_published_path.down.sql new file mode 100644 index 0000000..1dacdcd --- /dev/null 
+++ b/qubership-apihub-service/resources/migrations/14_fix_published_path.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version_revision_content +ALTER COLUMN path SET NOT NULL; diff --git a/qubership-apihub-service/resources/migrations/14_fix_published_path.up.sql b/qubership-apihub-service/resources/migrations/14_fix_published_path.up.sql new file mode 100644 index 0000000..965eed3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/14_fix_published_path.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version_revision_content +ALTER COLUMN path DROP NOT NULL; diff --git a/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.down.sql b/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.down.sql new file mode 100644 index 0000000..b65d44b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.down.sql @@ -0,0 +1,10 @@ +ALTER TABLE branch_draft_content +RENAME COLUMN status TO action; + +ALTER TABLE branch_draft_content +DROP COLUMN last_status, +DROP COLUMN conflicted_commit_id, +DROP COLUMN conflicted_file_id; + +ALTER TABLE branch_draft_reference +RENAME COLUMN status TO action; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.up.sql b/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.up.sql new file mode 100644 index 0000000..cdb1772 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/15_branch_operations_draft_reset.up.sql @@ -0,0 +1,13 @@ +truncate table branch_draft_content; +truncate table branch_draft_reference; + +ALTER TABLE branch_draft_content +RENAME COLUMN action TO status; + +ALTER TABLE branch_draft_content +ADD COLUMN last_status varchar, +ADD COLUMN conflicted_commit_id varchar, +ADD COLUMN conflicted_file_id varchar; + +ALTER TABLE branch_draft_reference +RENAME COLUMN action TO status; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/16_nullable_content_type.down.sql b/qubership-apihub-service/resources/migrations/16_nullable_content_type.down.sql new file mode 100644 index 0000000..b228927 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/16_nullable_content_type.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content +ALTER COLUMN data_type SET NOT NULL; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/16_nullable_content_type.up.sql b/qubership-apihub-service/resources/migrations/16_nullable_content_type.up.sql new file mode 100644 index 0000000..bb52336 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/16_nullable_content_type.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content +ALTER COLUMN data_type DROP NOT NULL; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/17_published_src.down.sql b/qubership-apihub-service/resources/migrations/17_published_src.down.sql new file mode 100644 index 0000000..151383b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/17_published_src.down.sql @@ -0,0 +1,3 @@ +DROP TABLE IF EXISTS published_sources CASCADE; + +DROP TABLE IF EXISTS published_sources_data CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/17_published_src.up.sql b/qubership-apihub-service/resources/migrations/17_published_src.up.sql new file mode 100644 index 0000000..ec8c94d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/17_published_src.up.sql @@ -0,0 +1,25 @@ +CREATE TABLE published_sources +( + project_id varchar NOT NULL, + version varchar NOT NULL, + revision integer NOT NULL, + checksum varchar NOT NULL +); + +CREATE TABLE published_sources_data +( + project_id varchar NOT NULL, + checksum varchar NOT NULL, + data bytea NOT NULL +); + +ALTER TABLE published_sources_data ADD CONSTRAINT "PK_published_sources_data" + PRIMARY KEY 
(checksum,project_id); + +ALTER TABLE published_sources ADD CONSTRAINT "FK_published_sources_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_sources_data (checksum,project_id) ON DELETE Restrict ON UPDATE Cascade; + +ALTER TABLE published_sources ADD CONSTRAINT "FK_published_sources_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade; + + diff --git a/qubership-apihub-service/resources/migrations/18_drop_branch_editors.down.sql b/qubership-apihub-service/resources/migrations/18_drop_branch_editors.down.sql new file mode 100644 index 0000000..5b88aa6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/18_drop_branch_editors.down.sql @@ -0,0 +1,7 @@ +CREATE TABLE branch_editors +( + project_id varchar NOT NULL, + branch_name varchar NOT NULL, + editors varchar[], + PRIMARY KEY (project_id, branch_name) +); diff --git a/qubership-apihub-service/resources/migrations/18_drop_branch_editors.up.sql b/qubership-apihub-service/resources/migrations/18_drop_branch_editors.up.sql new file mode 100644 index 0000000..6311b8c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/18_drop_branch_editors.up.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS branch_editors; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.down.sql b/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.down.sql new file mode 100644 index 0000000..0fba441 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS drafted_branches; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.up.sql b/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.up.sql new file mode 100644 index 0000000..4e50d30 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/19_drafted_branch_draft_reset.up.sql @@ -0,0 +1,12 @@ +truncate table branch_draft_content; +truncate table branch_draft_reference; + +CREATE TABLE drafted_branches +( + project_id varchar NOT NULL, + branch_name varchar NOT NULL, + change_type varchar, + original_config bytea, + editors varchar[], + PRIMARY KEY (project_id, branch_name) +); diff --git a/qubership-apihub-service/resources/migrations/1_init.down.sql b/qubership-apihub-service/resources/migrations/1_init.down.sql new file mode 100644 index 0000000..7908a9b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/1_init.down.sql @@ -0,0 +1,26 @@ +DROP TABLE IF EXISTS branch_draft_content CASCADE +; + +DROP TABLE IF EXISTS branch_draft_reference CASCADE +; + +DROP TABLE IF EXISTS project CASCADE +; + +DROP TABLE IF EXISTS published_data CASCADE +; + +DROP TABLE IF EXISTS published_version CASCADE +; + +DROP TABLE IF EXISTS published_version_reference CASCADE +; + +DROP TABLE IF EXISTS published_version_revision_content CASCADE +; + +DROP TABLE IF EXISTS user_data CASCADE +; + +DROP TABLE IF EXISTS user_integration CASCADE +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/1_init.up.sql b/qubership-apihub-service/resources/migrations/1_init.up.sql new file mode 100644 index 0000000..794d034 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/1_init.up.sql @@ -0,0 +1,270 @@ +/* ---------------------------------------------------- */ +/* Generated by Enterprise Architect Version 14.1 */ +/* Created On : 28-Apr-2022 11:55:35 */ +/* DBMS : PostgreSQL */ +/* ---------------------------------------------------- */ + +/* Drop Tables */ + +DROP TABLE IF EXISTS branch_draft_content CASCADE +; + +DROP TABLE IF EXISTS branch_draft_reference CASCADE +; + +DROP TABLE IF EXISTS project CASCADE +; + +DROP TABLE IF EXISTS published_data CASCADE +; + +DROP TABLE IF EXISTS published_version CASCADE +; + +DROP 
TABLE IF EXISTS published_version_reference CASCADE +; + +DROP TABLE IF EXISTS published_version_revision_content CASCADE +; + +DROP TABLE IF EXISTS user_data CASCADE +; + +DROP TABLE IF EXISTS user_integration CASCADE +; + +/* Create Tables */ + +CREATE TABLE branch_draft_content +( + project_id varchar NOT NULL, + branch_name varchar NOT NULL, + index integer NOT NULL DEFAULT 0, + name varchar NOT NULL, + file_id varchar NOT NULL, + path varchar NOT NULL, + data_type varchar NOT NULL, -- OpenAPI / Swagger / MD + data bytea, + media_type varchar NOT NULL, -- HTTP media-type + is_updated boolean NOT NULL +) +; + +CREATE TABLE branch_draft_reference +( + project_id varchar NOT NULL, + branch_name varchar NOT NULL, + reference_project_id varchar NOT NULL, + reference_version varchar NOT NULL, + relation_type varchar NOT NULL -- INCLUDE / DEPEND +) +; + +CREATE TABLE project +( + id varchar NOT NULL, + kind varchar NOT NULL, -- PROJECT / GROUP + name varchar NOT NULL, + alias varchar NOT NULL, + parent_id varchar NULL, -- Only for the GROUP kind + image_url varchar NULL, + description text NULL, + integration_type varchar NULL, -- GitLab / Local storage + default_branch varchar NULL, + default_folder varchar NULL, + repository_id varchar NULL, + delete_date timestamp without time zone NULL +) +; + +CREATE TABLE published_data +( + project_id varchar NOT NULL, + checksum varchar NOT NULL, + media_type varchar NOT NULL, -- HTTP media-type + data bytea NOT NULL, + commit_id varchar NOT NULL, + commit_date timestamp without time zone NOT NULL +) +; + +CREATE TABLE published_version +( + project_id varchar NOT NULL, + version varchar NOT NULL, + revision integer NOT NULL, + status varchar NOT NULL, -- DRAFT / APPROVED / RELEASED / ARCHIVE + publish_date timestamp without time zone NOT NULL, + delete_date timestamp without time zone NULL, + branch_name varchar NOT NULL +) +; + +CREATE TABLE published_version_reference +( + project_id varchar NOT NULL, + version varchar NOT 
NULL, + revision integer NOT NULL, + reference_id varchar NOT NULL, + reference_version varchar NOT NULL, + relation_type varchar NOT NULL -- INCLUDE / DEPEND +) +; + +CREATE TABLE published_version_revision_content +( + project_id varchar NOT NULL, + version varchar NOT NULL, + revision integer NOT NULL, + checksum varchar NOT NULL, + index integer NOT NULL DEFAULT 0, + file_id varchar NOT NULL, + path varchar NOT NULL, + slug varchar NOT NULL, + data_type varchar NOT NULL, -- OpenAPI / Swagger / MD + name varchar NOT NULL, + description text NULL +) +; + +CREATE TABLE user_data +( + user_id varchar NOT NULL, + email varchar NULL, + name varchar NULL, + avatar_url varchar NULL +) +; + +CREATE TABLE user_integration +( + user_id varchar NOT NULL, + integration_type varchar NOT NULL, -- GitLab + key text NULL +) +; + +CREATE TABLE favorite_projects +( + user_id varchar NOT NULL, + project_id varchar NOT NULL +) +; + +/* Create Primary Keys, Indexes, Uniques, Checks */ + +ALTER TABLE branch_draft_content ADD CONSTRAINT "PK_branch_draft_content" + PRIMARY KEY (project_id,branch_name,file_id) +; + +ALTER TABLE branch_draft_reference ADD CONSTRAINT "PK_branch_draft_reference" + PRIMARY KEY (branch_name,project_id, reference_project_id, reference_version, relation_type) +; + +ALTER TABLE project ADD CONSTRAINT "PK_project" + PRIMARY KEY (id) +; + +ALTER TABLE published_data ADD CONSTRAINT "PK_published_data" + PRIMARY KEY (checksum,project_id) +; + +ALTER TABLE published_version ADD CONSTRAINT "PK_published_version" + PRIMARY KEY (project_id,version,revision) +; + +ALTER TABLE published_version_reference ADD CONSTRAINT "PK_published_version_reference" + PRIMARY KEY (project_id,version,revision,reference_id,relation_type,reference_version) +; + +ALTER TABLE user_data ADD CONSTRAINT "PK_user_data" + PRIMARY KEY (user_id) +; + +ALTER TABLE user_integration ADD CONSTRAINT "PK_user_integration" + PRIMARY KEY (user_id, integration_type) +; + +ALTER TABLE favorite_projects ADD 
CONSTRAINT "PK_favorite_projects" + PRIMARY KEY (user_id, project_id) +; + +/* Create Foreign Key Constraints */ + +ALTER TABLE branch_draft_content ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE branch_draft_reference ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE project ADD CONSTRAINT "FK_parent_project_id" + FOREIGN KEY (parent_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_reference ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_data (checksum,project_id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade +; + +/*ALTER TABLE user_integration ADD CONSTRAINT "FK_user_data" + FOREIGN KEY (user_id) REFERENCES user_data (user_id) ON DELETE Restrict ON UPDATE Cascade +;*/ + +/* Create Table Comments, Sequences for Autonumber Columns */ + +COMMENT ON COLUMN branch_draft_content.data_type + IS 'OpenAPI / Swagger / MD' +; + +COMMENT ON COLUMN branch_draft_content.media_type + IS 'HTTP media-type' +; + +COMMENT ON COLUMN branch_draft_reference.relation_type + IS 'INCLUDE / DEPEND' +; + +COMMENT ON COLUMN project.kind + IS 'PROJECT / GROUP' +; + +COMMENT ON COLUMN project.parent_id + IS 'Only for the GROUP 
kind' +; + +COMMENT ON COLUMN project.integration_type + IS 'GitLab / Local storage' +; + +COMMENT ON COLUMN published_data.media_type + IS 'HTTP media-type' +; + +COMMENT ON COLUMN published_version.status + IS 'DRAFT / APPROVED / RELEASED / ARCHIVE' +; + +COMMENT ON COLUMN published_version_reference.relation_type + IS 'INCLUDE / DEPEND' +; + +COMMENT ON COLUMN published_version_revision_content.data_type + IS 'OpenAPI / Swagger / MD' +; + +COMMENT ON COLUMN user_integration.integration_type + IS 'GitLab' +; diff --git a/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.down.sql b/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.down.sql new file mode 100644 index 0000000..4f7958d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.down.sql @@ -0,0 +1 @@ +ALTER TABLE drafted_branches DROP CONSTRAINT "FK_project"; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.up.sql b/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.up.sql new file mode 100644 index 0000000..4be00cc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/20_drafted_branch_constraint.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE drafted_branches ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/21_cascade_deletion.down.sql b/qubership-apihub-service/resources/migrations/21_cascade_deletion.down.sql new file mode 100644 index 0000000..270619c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/21_cascade_deletion.down.sql @@ -0,0 +1,43 @@ + +ALTER TABLE branch_draft_content DROP CONSTRAINT "FK_project"; +ALTER TABLE branch_draft_content ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE 
Cascade +; + +ALTER TABLE branch_draft_reference DROP CONSTRAINT "FK_project"; +ALTER TABLE branch_draft_reference ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE project DROP CONSTRAINT "FK_parent_project_id"; +ALTER TABLE project ADD CONSTRAINT "FK_parent_project_id" + FOREIGN KEY (parent_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version DROP CONSTRAINT "FK_project"; +ALTER TABLE published_version ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_reference DROP CONSTRAINT "FK_published_version"; +ALTER TABLE published_version_reference ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_revision_content DROP CONSTRAINT "FK_published_data"; +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_data (checksum,project_id) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_version_revision_content DROP CONSTRAINT "FK_published_version_revision"; +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade +; + +ALTER TABLE published_sources DROP CONSTRAINT "FK_published_sources_data"; +ALTER TABLE published_sources ADD CONSTRAINT "FK_published_sources_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_sources_data (checksum,project_id) ON DELETE Restrict ON UPDATE Cascade; + +ALTER TABLE published_sources DROP CONSTRAINT "FK_published_sources_version_revision"; +ALTER TABLE published_sources 
ADD CONSTRAINT "FK_published_sources_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Restrict ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/21_cascade_deletion.up.sql b/qubership-apihub-service/resources/migrations/21_cascade_deletion.up.sql new file mode 100644 index 0000000..dee9e1f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/21_cascade_deletion.up.sql @@ -0,0 +1,43 @@ + +ALTER TABLE branch_draft_content DROP CONSTRAINT "FK_project"; +ALTER TABLE branch_draft_content ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE branch_draft_reference DROP CONSTRAINT "FK_project"; +ALTER TABLE branch_draft_reference ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE project DROP CONSTRAINT "FK_parent_project_id"; +ALTER TABLE project ADD CONSTRAINT "FK_parent_project_id" + FOREIGN KEY (parent_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE published_version DROP CONSTRAINT "FK_project"; +ALTER TABLE published_version ADD CONSTRAINT "FK_project" + FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE published_version_reference DROP CONSTRAINT "FK_published_version"; +ALTER TABLE published_version_reference ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE published_version_revision_content DROP CONSTRAINT "FK_published_data"; +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_data (checksum,project_id) ON DELETE Cascade ON UPDATE Cascade +; 
+ +ALTER TABLE published_version_revision_content DROP CONSTRAINT "FK_published_version_revision"; +ALTER TABLE published_version_revision_content ADD CONSTRAINT "FK_published_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Cascade ON UPDATE Cascade +; + +ALTER TABLE published_sources DROP CONSTRAINT "FK_published_sources_data"; +ALTER TABLE published_sources ADD CONSTRAINT "FK_published_sources_data" + FOREIGN KEY (checksum,project_id) REFERENCES published_sources_data (checksum,project_id) ON DELETE Cascade ON UPDATE Cascade; + +ALTER TABLE published_sources DROP CONSTRAINT "FK_published_sources_version_revision"; +ALTER TABLE published_sources ADD CONSTRAINT "FK_published_sources_version_revision" + FOREIGN KEY (project_id,version,revision) REFERENCES published_version (project_id,version,revision) ON DELETE Cascade ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/22_previous_version.down.sql b/qubership-apihub-service/resources/migrations/22_previous_version.down.sql new file mode 100644 index 0000000..e09af1b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/22_previous_version.down.sql @@ -0,0 +1 @@ +alter table published_version drop column previous_version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/22_previous_version.up.sql b/qubership-apihub-service/resources/migrations/22_previous_version.up.sql new file mode 100644 index 0000000..5b14a73 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/22_previous_version.up.sql @@ -0,0 +1,2 @@ +alter table published_version +ADD COLUMN previous_version varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/23_media_type_datafix.down.sql b/qubership-apihub-service/resources/migrations/23_media_type_datafix.down.sql new file mode 100644 index 0000000..cd3f056 --- 
/dev/null +++ b/qubership-apihub-service/resources/migrations/23_media_type_datafix.down.sql @@ -0,0 +1,6 @@ +update published_data +set media_type = 'text/plain' +where media_type in ('application/zip', 'image/jpeg', 'image/png'); +update published_data +set media_type = 'application/json' +where checksum in (select checksum from published_version_revision_content where file_id ilike '%.json'); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/23_media_type_datafix.up.sql b/qubership-apihub-service/resources/migrations/23_media_type_datafix.up.sql new file mode 100644 index 0000000..4ff3f86 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/23_media_type_datafix.up.sql @@ -0,0 +1,12 @@ +update published_data +set media_type = 'application/zip' +where checksum in (select checksum from published_version_revision_content where file_id ilike '%.docx'); +update published_data +set media_type = 'image/jpeg' +where checksum in (select checksum from published_version_revision_content where file_id ilike '%.jpg'); +update published_data +set media_type = 'image/png' +where checksum in (select checksum from published_version_revision_content where file_id ilike '%.png'); +update published_data +set media_type = 'text/plain' +where checksum in (select checksum from published_version_revision_content where file_id ilike '%.json'); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.down.sql b/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.down.sql new file mode 100644 index 0000000..4c01a9f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.down.sql @@ -0,0 +1,2 @@ +update published_version v +set metadata = metadata #- '{repository_url}'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.up.sql 
b/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.up.sql new file mode 100644 index 0000000..e274a28 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/24_fill_versions_repository_url.up.sql @@ -0,0 +1,4 @@ +update published_version v +set metadata = metadata || jsonb_build_object('repository_url', p.repository_url) +from project p +where p.id = v.project_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/25_package_group.up.sql b/qubership-apihub-service/resources/migrations/25_package_group.up.sql new file mode 100644 index 0000000..9748bd5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/25_package_group.up.sql @@ -0,0 +1,40 @@ +DROP TABLE IF EXISTS package_group; +ALTER TABLE published_version DROP CONSTRAINT "FK_project"; +ALTER TABLE apihub_api_keys DROP CONSTRAINT "FK_project"; +ALTER TABLE project DROP CONSTRAINT "FK_parent_project_id"; + +CREATE TABLE package_group as SELECT * FROM project; + +delete from project where kind = 'group'; + +ALTER TABLE package_group ADD CONSTRAINT "PK_project_group" + PRIMARY KEY (id) +; + +UPDATE package_group SET kind = 'package' where kind = 'project'; + +ALTER TABLE package_group + DROP COLUMN repository_url, + DROP COLUMN repository_name, + DROP COLUMN repository_id, + DROP COLUMN integration_type, + DROP COLUMN default_branch, + DROP COLUMN default_folder; + +ALTER TABLE package_group + ADD COLUMN created_at timestamp without time zone, + ADD COLUMN created_by varchar, + ADD COLUMN deleted_by varchar; + +ALTER TABLE package_group + RENAME COLUMN delete_date to deleted_at; + +ALTER TABLE package_group ADD CONSTRAINT "FK_parent_package_group" + FOREIGN KEY (parent_id) REFERENCES package_group (id) ON DELETE Cascade ON UPDATE Cascade +; +ALTER TABLE published_version ADD CONSTRAINT "FK_package_group" + FOREIGN KEY (project_id) REFERENCES package_group (id) ON DELETE Cascade ON UPDATE Cascade +; +ALTER TABLE apihub_api_keys 
ADD CONSTRAINT "FK_package_group" + FOREIGN KEY (project_id) REFERENCES package_group (id) ON DELETE Cascade ON UPDATE Cascade +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/26_naming_refactoring.down.sql b/qubership-apihub-service/resources/migrations/26_naming_refactoring.down.sql new file mode 100644 index 0000000..dd751a1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/26_naming_refactoring.down.sql @@ -0,0 +1,47 @@ +ALTER TABLE project + ADD COLUMN kind varchar, + ADD COLUMN image_url varchar; + +ALTER TABLE project + DROP COLUMN deleted_by; + +ALTER TABLE project + RENAME COLUMN group_id to parent_id; +ALTER TABLE project + RENAME COLUMN deleted_at to delete_date; + +ALTER TABLE branch_draft_reference + RENAME COLUMN reference_package_id to reference_project_id; + +ALTER TABLE favorites RENAME to favorite_projects; + +ALTER TABLE favorite_projects + RENAME COLUMN id to project_id; + +ALTER TABLE published_version + RENAME COLUMN package_id to project_id; +ALTER TABLE published_version + RENAME COLUMN published_at to publish_date; +ALTER TABLE published_version + RENAME COLUMN deleted_at to delete_date; + +ALTER TABLE published_version_revision_content + RENAME COLUMN package_id to project_id; + +ALTER TABLE published_data + RENAME COLUMN package_id to project_id; + +ALTER TABLE published_version_reference + RENAME COLUMN package_id to project_id; + +ALTER TABLE shared_url_info + RENAME COLUMN package_id to project_id; + +ALTER TABLE published_sources + RENAME COLUMN package_id to project_id; + +ALTER TABLE published_sources_data + RENAME COLUMN package_id to project_id; + +ALTER TABLE apihub_api_keys + RENAME COLUMN package_id to project_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/26_naming_refactoring.up.sql b/qubership-apihub-service/resources/migrations/26_naming_refactoring.up.sql new file mode 100644 index 0000000..8c0cba4 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/26_naming_refactoring.up.sql @@ -0,0 +1,47 @@ +ALTER TABLE project + DROP COLUMN kind, + DROP COLUMN image_url; + +ALTER TABLE project + ADD COLUMN deleted_by varchar; + +ALTER TABLE project + RENAME COLUMN parent_id to group_id; +ALTER TABLE project + RENAME COLUMN delete_date to deleted_at; + +ALTER TABLE branch_draft_reference + RENAME COLUMN reference_project_id to reference_package_id; + +ALTER TABLE favorite_projects RENAME to favorites; + +ALTER TABLE favorites + RENAME COLUMN project_id to id; + +ALTER TABLE published_version + RENAME COLUMN project_id to package_id; +ALTER TABLE published_version + RENAME COLUMN publish_date to published_at; +ALTER TABLE published_version + RENAME COLUMN delete_date to deleted_at; + +ALTER TABLE published_version_revision_content + RENAME COLUMN project_id to package_id; + +ALTER TABLE published_data + RENAME COLUMN project_id to package_id; + +ALTER TABLE published_version_reference + RENAME COLUMN project_id to package_id; + +ALTER TABLE shared_url_info + RENAME COLUMN project_id to package_id; + +ALTER TABLE published_sources + RENAME COLUMN project_id to package_id; + +ALTER TABLE published_sources_data + RENAME COLUMN project_id to package_id; + +ALTER TABLE apihub_api_keys + RENAME COLUMN project_id to package_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/27_previous_package_id.down.sql b/qubership-apihub-service/resources/migrations/27_previous_package_id.down.sql new file mode 100644 index 0000000..2115572 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/27_previous_package_id.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version + DROP COLUMN previous_version_package_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/27_previous_package_id.up.sql b/qubership-apihub-service/resources/migrations/27_previous_package_id.up.sql new file mode 100644 index 
0000000..94af14e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/27_previous_package_id.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version + ADD COLUMN previous_version_package_id varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/28_title_field.down.sql b/qubership-apihub-service/resources/migrations/28_title_field.down.sql new file mode 100644 index 0000000..d4d98b9 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/28_title_field.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version_revision_content + DROP COLUMN title; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/28_title_field.up.sql b/qubership-apihub-service/resources/migrations/28_title_field.up.sql new file mode 100644 index 0000000..723b3bd --- /dev/null +++ b/qubership-apihub-service/resources/migrations/28_title_field.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version_revision_content + ADD COLUMN title varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/29_agent_config.down.sql b/qubership-apihub-service/resources/migrations/29_agent_config.down.sql new file mode 100644 index 0000000..94a537f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/29_agent_config.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS agent_config CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/29_agent_config.up.sql b/qubership-apihub-service/resources/migrations/29_agent_config.up.sql new file mode 100644 index 0000000..17ab4f0 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/29_agent_config.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE agent_config +( + cloud varchar NOT NULL, + namespace varchar NOT NULL, + config jsonb +); + +ALTER TABLE agent_config ADD CONSTRAINT "PK_agent_config" + PRIMARY KEY (cloud, namespace) +; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.down.sql b/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.down.sql new file mode 100644 index 0000000..37e3c18 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE project +DROP COLUMN repository_name, +DROP COLUMN repository_url; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.up.sql b/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.up.sql new file mode 100644 index 0000000..e882dc1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/2_projectsRepoAndUrl.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE project +ADD COLUMN repository_name VARCHAR, +ADD COLUMN repository_url VARCHAR; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/30_published_fields.down.sql b/qubership-apihub-service/resources/migrations/30_published_fields.down.sql new file mode 100644 index 0000000..7988ecc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/30_published_fields.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE published_version_revision_content +DROP COLUMN format; + +ALTER TABLE published_version_revision_content +ADD COLUMN description text; diff --git a/qubership-apihub-service/resources/migrations/30_published_fields.up.sql b/qubership-apihub-service/resources/migrations/30_published_fields.up.sql new file mode 100644 index 0000000..b57dbff --- /dev/null +++ b/qubership-apihub-service/resources/migrations/30_published_fields.up.sql @@ -0,0 +1,17 @@ +ALTER TABLE published_version_revision_content +DROP COLUMN description; + +ALTER TABLE published_version_revision_content +ADD COLUMN format varchar; + +update published_version_revision_content +set format = 'json' where file_id ilike '%.json'; + +update published_version_revision_content +set format = 'yaml' where file_id ilike '%.yaml' or file_id 
ilike '%.yml'; + +update published_version_revision_content +set format = 'md' where file_id ilike '%.md' or file_id ilike '%.markdown'; + +update published_version_revision_content +set format = 'unknown' where format is null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/31_unknown_type_datafix.up.sql b/qubership-apihub-service/resources/migrations/31_unknown_type_datafix.up.sql new file mode 100644 index 0000000..c1a7d32 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/31_unknown_type_datafix.up.sql @@ -0,0 +1,2 @@ +update published_version_revision_content +set data_type = 'unknown' where data_type = 'unknown-yaml' or data_type = 'unknown-json' or data_type = 'unknown-text'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/32_version_folder.down.sql b/qubership-apihub-service/resources/migrations/32_version_folder.down.sql new file mode 100644 index 0000000..14a81b4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/32_version_folder.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version +DROP COLUMN folder; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/32_version_folder.up.sql b/qubership-apihub-service/resources/migrations/32_version_folder.up.sql new file mode 100644 index 0000000..2b1dabb --- /dev/null +++ b/qubership-apihub-service/resources/migrations/32_version_folder.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE published_version +ADD COLUMN folder varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/33_included_draft_flag.down.sql b/qubership-apihub-service/resources/migrations/33_included_draft_flag.down.sql new file mode 100644 index 0000000..a2300b7 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/33_included_draft_flag.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content +DROP COLUMN included; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/33_included_draft_flag.up.sql b/qubership-apihub-service/resources/migrations/33_included_draft_flag.up.sql new file mode 100644 index 0000000..9550b66 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/33_included_draft_flag.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE branch_draft_content +ADD COLUMN included boolean NOT NULL default false; + +update branch_draft_content set included = true where status = 'included'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/34_package_service_name.down.sql b/qubership-apihub-service/resources/migrations/34_package_service_name.down.sql new file mode 100644 index 0000000..eddce16 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/34_package_service_name.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS package_service CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/34_package_service_name.up.sql b/qubership-apihub-service/resources/migrations/34_package_service_name.up.sql new file mode 100644 index 0000000..2bd6c2e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/34_package_service_name.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE package_service +( + package_id varchar NOT NULL, + service_name varchar NOT NULL, + UNIQUE (service_name) +); + +ALTER TABLE package_service ADD CONSTRAINT "PK_package_service" + PRIMARY KEY (package_id, service_name); + +ALTER TABLE package_service ADD CONSTRAINT "FK_package_group" + FOREIGN KEY (package_id) REFERENCES package_group (id) ON DELETE Cascade ON UPDATE Cascade; diff --git a/qubership-apihub-service/resources/migrations/35_search_tables.down.sql b/qubership-apihub-service/resources/migrations/35_search_tables.down.sql new file mode 100644 index 0000000..55897e4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/35_search_tables.down.sql @@ -0,0 +1,3 @@ +DROP TABLE IF EXISTS ts_published_data_path_split 
CASCADE; +DROP TABLE IF EXISTS ts_published_data_custom_split CASCADE; +DROP TABLE IF EXISTS ts_published_data_errors CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/35_search_tables.up.sql b/qubership-apihub-service/resources/migrations/35_search_tables.up.sql new file mode 100644 index 0000000..165551f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/35_search_tables.up.sql @@ -0,0 +1,23 @@ +CREATE TABLE ts_published_data_path_split +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + search_vector tsvector, + unique(package_id, checksum) +); + +CREATE TABLE ts_published_data_custom_split +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + search_vector tsvector, + unique(package_id, checksum) +); + +CREATE TABLE ts_published_data_errors +( + package_id varchar NOT NULL, + checksum varchar NOT NULL, + error varchar, + unique(package_id, checksum) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/36_search_table_gin_index.down.sql b/qubership-apihub-service/resources/migrations/36_search_table_gin_index.down.sql new file mode 100644 index 0000000..792e886 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/36_search_table_gin_index.down.sql @@ -0,0 +1,2 @@ +DROP INDEX IF EXISTS ts_published_data_path_split_idx; +DROP INDEX IF EXISTS ts_published_data_custom_split_idx; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/36_search_table_gin_index.up.sql b/qubership-apihub-service/resources/migrations/36_search_table_gin_index.up.sql new file mode 100644 index 0000000..4fb3dd0 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/36_search_table_gin_index.up.sql @@ -0,0 +1,9 @@ +CREATE INDEX ts_published_data_path_split_idx +ON ts_published_data_path_split +USING gin(search_vector) +with (fastupdate = true); + +CREATE INDEX ts_published_data_custom_split_idx +ON 
ts_published_data_custom_split +USING gin(search_vector) +with (fastupdate = true); diff --git a/qubership-apihub-service/resources/migrations/37_build_tables.down.sql b/qubership-apihub-service/resources/migrations/37_build_tables.down.sql new file mode 100644 index 0000000..bf36eb6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/37_build_tables.down.sql @@ -0,0 +1,3 @@ +DROP TABLE IF EXISTS build CASCADE; +DROP TABLE IF EXISTS build_src CASCADE; +DROP TABLE IF EXISTS build_depends CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/37_build_tables.up.sql b/qubership-apihub-service/resources/migrations/37_build_tables.up.sql new file mode 100644 index 0000000..ebf9b6d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/37_build_tables.up.sql @@ -0,0 +1,42 @@ +CREATE TABLE build +( + build_id varchar NOT NULL, + status varchar NOT NULL, + details varchar NULL, + package_id varchar NOT NULL, + version varchar NOT NULL, + created_at timestamp without time zone NOT NULL, + last_active timestamp without time zone NOT NULL, + created_by varchar NOT NULL, + restart_count integer +); +ALTER TABLE build ADD CONSTRAINT "PK_build" PRIMARY KEY (build_id); + + +CREATE TABLE build_src +( + build_id varchar NOT NULL, + source bytea NULL, + config jsonb NOT NULL +); + +ALTER TABLE build_src + ADD CONSTRAINT "FK_build_src" + FOREIGN KEY (build_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade +; + + +CREATE TABLE build_depends +( + build_id varchar NOT NULL, + depend_id varchar NOT NULL +); + +ALTER TABLE build_depends + ADD CONSTRAINT "FK_build_depends_id" + FOREIGN KEY (build_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade +; +ALTER TABLE build_depends + ADD CONSTRAINT "FK_build_depends_depend" + FOREIGN KEY (depend_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade +; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/38_version_changelog.down.sql b/qubership-apihub-service/resources/migrations/38_version_changelog.down.sql new file mode 100644 index 0000000..258bbca --- /dev/null +++ b/qubership-apihub-service/resources/migrations/38_version_changelog.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS published_version_validation; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/38_version_changelog.up.sql b/qubership-apihub-service/resources/migrations/38_version_changelog.up.sql new file mode 100644 index 0000000..ed5c1bf --- /dev/null +++ b/qubership-apihub-service/resources/migrations/38_version_changelog.up.sql @@ -0,0 +1,15 @@ +CREATE TABLE published_version_validation +( + package_id varchar NOT NULL, + version varchar NOT NULL, + revision integer NOT NULL, + changelog jsonb, + spectral jsonb, + bwc jsonb +); + +ALTER TABLE published_version_validation ADD CONSTRAINT "PK_published_version_validation" + PRIMARY KEY (package_id,version,revision); + +ALTER TABLE published_version_validation ADD CONSTRAINT "FK_published_version_validation" + FOREIGN KEY (package_id,version,revision) REFERENCES published_version (package_id,version,revision) ON DELETE Cascade ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/39_build_result.down.sql b/qubership-apihub-service/resources/migrations/39_build_result.down.sql new file mode 100644 index 0000000..81ba880 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/39_build_result.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS build_result; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/39_build_result.up.sql b/qubership-apihub-service/resources/migrations/39_build_result.up.sql new file mode 100644 index 0000000..8ed2ed8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/39_build_result.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE build_result +( + 
build_id varchar NOT NULL, + data bytea NOT NULL +); + +ALTER TABLE build_result ADD CONSTRAINT "FK_build_result_build_id" + FOREIGN KEY (build_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/3_draftActions.down.sql b/qubership-apihub-service/resources/migrations/3_draftActions.down.sql new file mode 100644 index 0000000..7c3f16a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/3_draftActions.down.sql @@ -0,0 +1,9 @@ +ALTER TABLE branch_draft_content +DROP COLUMN moved_from, +DROP COLUMN action; + +ALTER TABLE branch_draft_reference +DROP COLUMN action; + +ALTER TABLE branch_draft_content +ADD COLUMN is_updated BOOLEAN; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/3_draftActions.up.sql b/qubership-apihub-service/resources/migrations/3_draftActions.up.sql new file mode 100644 index 0000000..bae37ec --- /dev/null +++ b/qubership-apihub-service/resources/migrations/3_draftActions.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE branch_draft_content +DROP COLUMN is_updated; + +ALTER TABLE branch_draft_content +ADD COLUMN action VARCHAR, +ADD COLUMN moved_from VARCHAR; + +ALTER TABLE branch_draft_reference +ADD COLUMN action VARCHAR; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/40_add_rev_pk.down.sql b/qubership-apihub-service/resources/migrations/40_add_rev_pk.down.sql new file mode 100644 index 0000000..61e1840 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/40_add_rev_pk.down.sql @@ -0,0 +1,2 @@ +alter table published_version_revision_content + drop constraint published_version_revision_content_pk; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/40_add_rev_pk.up.sql b/qubership-apihub-service/resources/migrations/40_add_rev_pk.up.sql new file mode 100644 index 0000000..b4a52f4 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/40_add_rev_pk.up.sql @@ -0,0 +1,3 @@ +alter table published_version_revision_content + add constraint published_version_revision_content_pk + primary key (package_id, version, revision, file_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/41_nullable_included.down.sql b/qubership-apihub-service/resources/migrations/41_nullable_included.down.sql new file mode 100644 index 0000000..405b9a1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/41_nullable_included.down.sql @@ -0,0 +1 @@ +alter table branch_draft_content alter column included set not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/41_nullable_included.up.sql b/qubership-apihub-service/resources/migrations/41_nullable_included.up.sql new file mode 100644 index 0000000..11fd53e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/41_nullable_included.up.sql @@ -0,0 +1 @@ +alter table branch_draft_content alter column included drop not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/42_use_zero_pg.down.sql b/qubership-apihub-service/resources/migrations/42_use_zero_pg.down.sql new file mode 100644 index 0000000..8214e54 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/42_use_zero_pg.down.sql @@ -0,0 +1,4 @@ +update branch_draft_content set included = null where included = false; +update branch_draft_content set publish = null where publish = false; +update user_integration set is_revoked = null where is_revoked = false; +update build set restart_count = null where restart_count = 0; diff --git a/qubership-apihub-service/resources/migrations/42_use_zero_pg.up.sql b/qubership-apihub-service/resources/migrations/42_use_zero_pg.up.sql new file mode 100644 index 0000000..077fdc6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/42_use_zero_pg.up.sql @@ -0,0 +1,4 @@ +update 
branch_draft_content set included = false where included is null; +update branch_draft_content set publish = false where publish is null; +update user_integration set is_revoked = false where is_revoked is null; +update build set restart_count = 0 where restart_count is null; diff --git a/qubership-apihub-service/resources/migrations/43_builder_task.down.sql b/qubership-apihub-service/resources/migrations/43_builder_task.down.sql new file mode 100644 index 0000000..a205dab --- /dev/null +++ b/qubership-apihub-service/resources/migrations/43_builder_task.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS builder_task; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/43_builder_task.up.sql b/qubership-apihub-service/resources/migrations/43_builder_task.up.sql new file mode 100644 index 0000000..5abdf52 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/43_builder_task.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE builder_task +( + build_id varchar NOT NULL, + builder_id varchar NOT NULL +); + +ALTER TABLE builder_task ADD CONSTRAINT "FK_builder_task_build_id" + FOREIGN KEY (build_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade; + +ALTER TABLE builder_task ADD CONSTRAINT "PK_builder_task" PRIMARY KEY (build_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/44_config_folders.down.sql b/qubership-apihub-service/resources/migrations/44_config_folders.down.sql new file mode 100644 index 0000000..cecc4dc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/44_config_folders.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE branch_draft_content +DROP COLUMN is_folder; +ALTER TABLE branch_draft_content +DROP COLUMN from_folder; +ALTER TABLE branch_draft_content +ALTER COLUMN name set not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/44_config_folders.up.sql b/qubership-apihub-service/resources/migrations/44_config_folders.up.sql 
new file mode 100644 index 0000000..76660c8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/44_config_folders.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE branch_draft_content +ADD COLUMN is_folder bool, +ADD COLUMN from_folder bool; +ALTER TABLE branch_draft_content +ALTER COLUMN name DROP not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/45_user_avatar_table.down.sql b/qubership-apihub-service/resources/migrations/45_user_avatar_table.down.sql new file mode 100644 index 0000000..702ee7a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/45_user_avatar_table.down.sql @@ -0,0 +1,2 @@ +DROP TABLE IF EXISTS user_avatar_data CASCADE +; diff --git a/qubership-apihub-service/resources/migrations/45_user_avatar_table.up.sql b/qubership-apihub-service/resources/migrations/45_user_avatar_table.up.sql new file mode 100644 index 0000000..2d11064 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/45_user_avatar_table.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE user_avatar_data +( + user_id varchar NOT NULL, + avatar bytea NULL, + checksum bytea NULL +) +; + +ALTER TABLE user_avatar_data ADD CONSTRAINT "PK_user_avatar_data" + PRIMARY KEY (user_id) +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/46_access_control.down.sql b/qubership-apihub-service/resources/migrations/46_access_control.down.sql new file mode 100644 index 0000000..0556a52 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/46_access_control.down.sql @@ -0,0 +1,3 @@ +drop table if exists system_role; +drop table if exists package_member_role; +alter table package_group drop column default_role; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/46_access_control.up.sql b/qubership-apihub-service/resources/migrations/46_access_control.up.sql new file mode 100644 index 0000000..75c8280 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/46_access_control.up.sql @@ -0,0 +1,21 @@ +CREATE TABLE system_role +( + user_id varchar NOT NULL, + role varchar NOT NULL +); +CREATE TABLE package_member_role +( + user_id varchar NOT NULL, + package_id varchar NOT NULL, + role varchar NOT NULL, + created_by varchar NOT NULL, + created_at timestamp without time zone NOT NULL, + updated_by varchar NULL, + updated_at timestamp without time zone NULL +); + +ALTER TABLE package_group +ADD COLUMN default_role varchar NOT NULL default 'Viewer'; + +ALTER TABLE package_member_role ADD CONSTRAINT "PK_package_member_role" + PRIMARY KEY (package_id, user_id); diff --git a/qubership-apihub-service/resources/migrations/47_version_labels.down.sql b/qubership-apihub-service/resources/migrations/47_version_labels.down.sql new file mode 100644 index 0000000..d8a0319 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/47_version_labels.down.sql @@ -0,0 +1,2 @@ +alter table published_version + drop column labels; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/47_version_labels.up.sql b/qubership-apihub-service/resources/migrations/47_version_labels.up.sql new file mode 100644 index 0000000..3f75e8d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/47_version_labels.up.sql @@ -0,0 +1,2 @@ +alter table published_version + add labels varchar array; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/48_access_control_constraint.down.sql b/qubership-apihub-service/resources/migrations/48_access_control_constraint.down.sql new file mode 100644 index 0000000..47e18dc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/48_access_control_constraint.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE package_member_role DROP CONSTRAINT "FK_package_group"; +ALTER TABLE system_role DROP CONSTRAINT "PK_system_role"; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/48_access_control_constraint.up.sql b/qubership-apihub-service/resources/migrations/48_access_control_constraint.up.sql new file mode 100644 index 0000000..534e60e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/48_access_control_constraint.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE package_member_role ADD CONSTRAINT "FK_package_group" + FOREIGN KEY (package_id) REFERENCES package_group (id) ON DELETE Cascade ON UPDATE Cascade; +ALTER TABLE system_role ADD CONSTRAINT "PK_system_role" + PRIMARY KEY (user_id); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/49_api_key_roles.down.sql b/qubership-apihub-service/resources/migrations/49_api_key_roles.down.sql new file mode 100644 index 0000000..38ae687 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/49_api_key_roles.down.sql @@ -0,0 +1 @@ +ALTER TABLE apihub_api_keys DROP COLUMN role; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/49_api_key_roles.up.sql b/qubership-apihub-service/resources/migrations/49_api_key_roles.up.sql new file mode 100644 index 0000000..74fa247 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/49_api_key_roles.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE apihub_api_keys +ADD COLUMN role varchar NOT NULL default 'Admin'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/4_intKeyRevoked.down.sql b/qubership-apihub-service/resources/migrations/4_intKeyRevoked.down.sql new file mode 100644 index 0000000..72e2ff1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/4_intKeyRevoked.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE user_integration +DROP COLUMN is_revoked; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/4_intKeyRevoked.up.sql b/qubership-apihub-service/resources/migrations/4_intKeyRevoked.up.sql new file mode 100644 index 0000000..88497cb --- /dev/null 
+++ b/qubership-apihub-service/resources/migrations/4_intKeyRevoked.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE user_integration + ADD COLUMN is_revoked boolean DEFAULT false; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/50_oauth_token.up.sql b/qubership-apihub-service/resources/migrations/50_oauth_token.up.sql new file mode 100644 index 0000000..9de4d7e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/50_oauth_token.up.sql @@ -0,0 +1 @@ +DELETE FROM user_integration \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/51_internal_users.down.sql b/qubership-apihub-service/resources/migrations/51_internal_users.down.sql new file mode 100644 index 0000000..89c0924 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/51_internal_users.down.sql @@ -0,0 +1,3 @@ +DROP TABLE IF EXISTS external_identity; +ALTER TABLE user_data DROP COLUMN password; +ALTER TABLE user_data DROP CONSTRAINT email_unique; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/51_internal_users.up.sql b/qubership-apihub-service/resources/migrations/51_internal_users.up.sql new file mode 100644 index 0000000..2e08d9a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/51_internal_users.up.sql @@ -0,0 +1,23 @@ +CREATE TABLE external_identity +( + provider varchar NOT NULL, + external_id varchar NOT NULL, + internal_id varchar NOT NULL, + PRIMARY KEY(provider, external_id) +); + +ALTER TABLE user_data +ADD COLUMN password bytea; + +ALTER TABLE external_identity ADD CONSTRAINT "FK_user_data" + FOREIGN KEY (internal_id) REFERENCES user_data (user_id) ON DELETE Cascade ON UPDATE Cascade; + +update user_data set email = LOWER(email); + +delete from user_data +where email in ( + select email from user_data + group by email having count(email) > 1); + +ALTER TABLE user_data +ADD CONSTRAINT email_unique UNIQUE (email); \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/52_workspaces.down.sql b/qubership-apihub-service/resources/migrations/52_workspaces.down.sql new file mode 100644 index 0000000..77d21f1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/52_workspaces.down.sql @@ -0,0 +1 @@ +update package_group set kind = 'group' where parent_id is null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/52_workspaces.up.sql b/qubership-apihub-service/resources/migrations/52_workspaces.up.sql new file mode 100644 index 0000000..d0f1996 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/52_workspaces.up.sql @@ -0,0 +1 @@ +update package_group set kind = 'workspace' where parent_id is null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/53_packageId.down.sql b/qubership-apihub-service/resources/migrations/53_packageId.down.sql new file mode 100644 index 0000000..dbe3047 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/53_packageId.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE project +DROP COLUMN package_id \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/53_packageId.up.sql b/qubership-apihub-service/resources/migrations/53_packageId.up.sql new file mode 100644 index 0000000..2489df1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/53_packageId.up.sql @@ -0,0 +1,6 @@ +ALTER TABLE project + ADD COLUMN package_id varchar; + +update project p +set package_id = id +where (select count(id) from package_group where id = p.id and kind = 'package') > 0 \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/54_version_folders.down.sql b/qubership-apihub-service/resources/migrations/54_version_folders.down.sql new file mode 100644 index 0000000..cfdf587 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/54_version_folders.down.sql @@ -0,0 +1 @@ +alter table published_version add 
column folder varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/54_version_folders.up.sql b/qubership-apihub-service/resources/migrations/54_version_folders.up.sql new file mode 100644 index 0000000..1c5f106 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/54_version_folders.up.sql @@ -0,0 +1,11 @@ +update published_version +set labels = labels || folder +where folder is not null +and folder != all(labels); + +update published_version +set labels = ARRAY[folder] +where folder is not null +and labels is null; + +alter table published_version drop column folder; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/55_default_released_version.down.sql b/qubership-apihub-service/resources/migrations/55_default_released_version.down.sql new file mode 100644 index 0000000..c21add5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/55_default_released_version.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE package_group + DROP COLUMN default_released_version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/55_default_released_version.up.sql b/qubership-apihub-service/resources/migrations/55_default_released_version.up.sql new file mode 100644 index 0000000..9346ab3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/55_default_released_version.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE package_group + ADD COLUMN default_released_version VARCHAR; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/56_remove_api_keys_fk.up.sql b/qubership-apihub-service/resources/migrations/56_remove_api_keys_fk.up.sql new file mode 100644 index 0000000..a21c08c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/56_remove_api_keys_fk.up.sql @@ -0,0 +1,2 @@ +alter table apihub_api_keys + drop constraint "FK_package_group"; diff --git 
a/qubership-apihub-service/resources/migrations/57_rest_operations.down.sql b/qubership-apihub-service/resources/migrations/57_rest_operations.down.sql new file mode 100644 index 0000000..51530df --- /dev/null +++ b/qubership-apihub-service/resources/migrations/57_rest_operations.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE published_version_revision_content DROP COLUMN operation_ids; +DROP TABLE IF EXISTS operation; +DROP TABLE IF EXISTS operation_data; +DROP TABLE IF EXISTS changed_operation; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/57_rest_operations.up.sql b/qubership-apihub-service/resources/migrations/57_rest_operations.up.sql new file mode 100644 index 0000000..38f4880 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/57_rest_operations.up.sql @@ -0,0 +1,45 @@ +ALTER TABLE published_version_revision_content +ADD COLUMN operation_ids varchar[]; + +CREATE TABLE operation +( +package_id varchar NOT NULL, +version varchar NOT NULL, +revision integer NOT NULL, +operation_id varchar NOT NULL, +data_hash varchar NOT NULL, +deprecated boolean NOT NULL, +kind varchar NULL, +title varchar NULL, +metadata jsonb NULL, +type varchar NOT NULL, +CONSTRAINT pk_operation PRIMARY KEY (package_id, version, revision, operation_id) +); + +CREATE TABLE operation_data +( +data_hash varchar NOT NULL, +data bytea NULL, +search_scope jsonb NULL, +CONSTRAINT pk_operation_data PRIMARY KEY (data_hash) +); + +ALTER TABLE operation ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (package_id,version,revision) REFERENCES published_version (package_id,version,revision) ON DELETE Cascade ON UPDATE Cascade; + +ALTER TABLE operation ADD CONSTRAINT "FK_operation_data" + FOREIGN KEY (data_hash) REFERENCES operation_data (data_hash) ON DELETE Cascade ON UPDATE Cascade; + +CREATE TABLE changed_operation +( +package_id varchar NOT NULL, +version varchar NOT NULL, +revision integer NOT NULL, +previous_package_id varchar NOT NULL, 
+previous_version varchar NOT NULL, +previous_revision integer NOT NULL, +operation_id varchar NOT NULL, +data_hash varchar NULL, +previous_data_hash varchar NULL, +changes_summary jsonb NULL, +changes jsonb NULL); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/58_operations_search.down.sql b/qubership-apihub-service/resources/migrations/58_operations_search.down.sql new file mode 100644 index 0000000..5f9e9b4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/58_operations_search.down.sql @@ -0,0 +1,2 @@ +DROP TABLE ts_operation_data; +DROP INDEX IF EXISTS ts_operation_data_idx; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/58_operations_search.up.sql b/qubership-apihub-service/resources/migrations/58_operations_search.up.sql new file mode 100644 index 0000000..411f627 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/58_operations_search.up.sql @@ -0,0 +1,28 @@ +CREATE TABLE ts_operation_data( + data_hash varchar NOT NULL, + scope_all tsvector, + scope_request tsvector, + scope_response tsvector, + scope_annotation tsvector, + scope_properties tsvector, + scope_examples tsvector, + CONSTRAINT pk_ts_operation_data PRIMARY KEY(data_hash) +); + +ALTER TABLE ts_operation_data ADD CONSTRAINT "FK_operation_data" + FOREIGN KEY (data_hash) REFERENCES operation_data (data_hash) ON DELETE Cascade ON UPDATE Cascade; + +insert into ts_operation_data +select data_hash, +to_tsvector(jsonb_extract_path_text(search_scope, 'all')) scope_all, +to_tsvector(jsonb_extract_path_text(search_scope, 'request')) scope_request, +to_tsvector(jsonb_extract_path_text(search_scope, 'response')) scope_response, +to_tsvector(jsonb_extract_path_text(search_scope, 'annotation')) scope_annotation, +to_tsvector(jsonb_extract_path_text(search_scope, 'properties')) scope_properties, +to_tsvector(jsonb_extract_path_text(search_scope, 'examples')) scope_examples +from operation_data; + +CREATE 
INDEX ts_operation_data_idx +ON ts_operation_data +USING gin(scope_request,scope_response,scope_annotation,scope_properties,scope_examples) +with (fastupdate = true); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/59_version_createdby.down.sql b/qubership-apihub-service/resources/migrations/59_version_createdby.down.sql new file mode 100644 index 0000000..36c81b8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/59_version_createdby.down.sql @@ -0,0 +1 @@ +ALTER TABLE published_version DROP COLUMN created_by; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/59_version_createdby.up.sql b/qubership-apihub-service/resources/migrations/59_version_createdby.up.sql new file mode 100644 index 0000000..d580e7f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/59_version_createdby.up.sql @@ -0,0 +1 @@ +ALTER TABLE published_version ADD COLUMN created_by varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.down.sql b/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.down.sql new file mode 100644 index 0000000..d947b42 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content + DROP COLUMN commit_id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.up.sql b/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.up.sql new file mode 100644 index 0000000..eff821c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/5_draft_file_commit_id.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content + ADD COLUMN commit_id varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/60_ref_relation_type.down.sql 
b/qubership-apihub-service/resources/migrations/60_ref_relation_type.down.sql new file mode 100644 index 0000000..e19b0b3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/60_ref_relation_type.down.sql @@ -0,0 +1,22 @@ +ALTER TABLE published_version_reference ADD COLUMN relation_type varchar; + +with pkggroup as ( + select id as package_id from package_group where kind = 'group' +) +update published_version_reference pvr +set relation_type = 'import' + from pkggroup + where pvr.package_id = pkggroup.package_id; + +with pkgnotgroup as ( + select id as package_id from package_group where kind != 'group' +) +update published_version_reference pvr +set relation_type = 'depend' + from pkgnotgroup +where pvr.package_id = pkgnotgroup.package_id; + +ALTER TABLE published_version_reference DROP CONSTRAINT "PK_published_version_reference"; +ALTER TABLE published_version_reference ADD CONSTRAINT "PK_published_version_reference" + PRIMARY KEY (package_id,version,revision,reference_id,relation_type,reference_version) +; diff --git a/qubership-apihub-service/resources/migrations/60_ref_relation_type.up.sql b/qubership-apihub-service/resources/migrations/60_ref_relation_type.up.sql new file mode 100644 index 0000000..d5d1f54 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/60_ref_relation_type.up.sql @@ -0,0 +1,7 @@ +DELETE FROM published_version_reference WHERE relation_type = 'depend'; + +ALTER TABLE published_version_reference DROP CONSTRAINT "PK_published_version_reference"; +ALTER TABLE published_version_reference DROP COLUMN relation_type; +ALTER TABLE published_version_reference ADD CONSTRAINT "PK_published_version_reference" + PRIMARY KEY (package_id,version,revision,reference_id,reference_version) +; diff --git a/qubership-apihub-service/resources/migrations/61_agent_reg.down.sql b/qubership-apihub-service/resources/migrations/61_agent_reg.down.sql new file mode 100644 index 0000000..4e47c68 --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/61_agent_reg.down.sql @@ -0,0 +1 @@ +DROP TABLE agent; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/61_agent_reg.up.sql b/qubership-apihub-service/resources/migrations/61_agent_reg.up.sql new file mode 100644 index 0000000..b7f34fc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/61_agent_reg.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE agent +( + agent_id varchar NOT NULL, + cloud varchar NOT NULL, + namespace varchar NOT NULL, + url varchar NOT NULL, + last_active timestamp without time zone NOT NULL, + backend_version varchar NOT NULL, + frontend_version varchar NOT NULL, + name varchar, + PRIMARY KEY (agent_id) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/62_package_group_service.down.sql b/qubership-apihub-service/resources/migrations/62_package_group_service.down.sql new file mode 100644 index 0000000..35885a4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/62_package_group_service.down.sql @@ -0,0 +1 @@ +alter table package_group drop column service_name; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/62_package_group_service.up.sql b/qubership-apihub-service/resources/migrations/62_package_group_service.up.sql new file mode 100644 index 0000000..fd184f9 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/62_package_group_service.up.sql @@ -0,0 +1,4 @@ +alter table package_group add column service_name varchar; +update package_group pg set service_name = s.service_name +from package_service s +where s.package_id = pg.id; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/63_open_count.down.sql b/qubership-apihub-service/resources/migrations/63_open_count.down.sql new file mode 100644 index 0000000..3f31b25 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/63_open_count.down.sql @@ -0,0 +1,3 @@ 
+drop table published_version_open_count; +drop table published_document_open_count; +drop table operation_open_count; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/63_open_count.up.sql b/qubership-apihub-service/resources/migrations/63_open_count.up.sql new file mode 100644 index 0000000..ac96a6b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/63_open_count.up.sql @@ -0,0 +1,25 @@ +create table published_version_open_count +( + package_id varchar not null, + version varchar not null, + open_count bigint, + PRIMARY KEY(package_id, version) +); + +create table published_document_open_count +( + package_id varchar not null, + version varchar not null, + slug varchar not null, + open_count bigint, + PRIMARY KEY(package_id, version, slug) +); + +create table operation_open_count +( + package_id varchar not null, + version varchar not null, + operation_id varchar not null, + open_count bigint, + PRIMARY KEY(package_id, version, operation_id) +); diff --git a/qubership-apihub-service/resources/migrations/64_metadata_migration.down.sql b/qubership-apihub-service/resources/migrations/64_metadata_migration.down.sql new file mode 100644 index 0000000..a29cb59 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/64_metadata_migration.down.sql @@ -0,0 +1,8 @@ +ALTER TABLE published_data + ADD COLUMN metadata jsonb; + +UPDATE published_data AS pd +SET metadata = coalesce(JSONB_SET(coalesce(metadata, '{}'::jsonb), '{commitId}',(select metadata -> 'commitId' from published_version_revision_content as pvrc WHERE pd.package_id = pvrc.package_id AND pd.checksum = pvrc.checksum LIMIT 1)), metadata); + +UPDATE published_data AS pd +SET metadata = coalesce(JSONB_SET(coalesce(metadata, '{}'::jsonb), '{commitDate}',(select metadata -> 'commitDate' from published_version_revision_content as pvrc WHERE pd.package_id = pvrc.package_id AND pd.checksum = pvrc.checksum LIMIT 1)), metadata); diff --git a/qubership-apihub-service/resources/migrations/64_metadata_migration.up.sql 
b/qubership-apihub-service/resources/migrations/64_metadata_migration.up.sql new file mode 100644 index 0000000..997b34e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/64_metadata_migration.up.sql @@ -0,0 +1,8 @@ +UPDATE published_version_revision_content AS pvrc +SET metadata = coalesce(JSONB_SET(metadata, '{commitId}',(select metadata -> 'commitId' from published_data as pd WHERE pd.package_id = pvrc.package_id AND pd.checksum = pvrc.checksum LIMIT 1)), metadata); + +UPDATE published_version_revision_content AS pvrc +SET metadata = coalesce(JSONB_SET(metadata, '{commitDate}',(select metadata -> 'commitDate' from published_data as pd WHERE pd.package_id = pvrc.package_id AND pd.checksum = pvrc.checksum LIMIT 1)), metadata); + +ALTER TABLE published_data + DROP COLUMN metadata; diff --git a/qubership-apihub-service/resources/migrations/65_filename.down.sql b/qubership-apihub-service/resources/migrations/65_filename.down.sql new file mode 100644 index 0000000..715543f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/65_filename.down.sql @@ -0,0 +1,3 @@ +alter table published_version_revision_content + drop column filename; + diff --git a/qubership-apihub-service/resources/migrations/65_filename.up.sql b/qubership-apihub-service/resources/migrations/65_filename.up.sql new file mode 100644 index 0000000..a33cb8f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/65_filename.up.sql @@ -0,0 +1,2 @@ +alter table published_version_revision_content + add filename varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/66_add_src_index.down.sql b/qubership-apihub-service/resources/migrations/66_add_src_index.down.sql new file mode 100644 index 0000000..5f5aedc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/66_add_src_index.down.sql @@ -0,0 +1 @@ +drop index published_sources_package_id_version_revision_uindex; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/66_add_src_index.up.sql b/qubership-apihub-service/resources/migrations/66_add_src_index.up.sql new file mode 100644 index 0000000..9f25ce8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/66_add_src_index.up.sql @@ -0,0 +1,2 @@ +create unique index published_sources_package_id_version_revision_uindex + on published_sources (package_id, version, revision); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.down.sql b/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.down.sql new file mode 100644 index 0000000..f5c9974 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.down.sql @@ -0,0 +1 @@ +ALTER TABLE package_group DROP COLUMN release_version_pattern; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.up.sql b/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.up.sql new file mode 100644 index 0000000..3823e95 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/67_add_release_version_pattern.up.sql @@ -0,0 +1 @@ +ALTER TABLE package_group ADD COLUMN release_version_pattern varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/68_activity_tracking.down.sql b/qubership-apihub-service/resources/migrations/68_activity_tracking.down.sql new file mode 100644 index 0000000..83fd747 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/68_activity_tracking.down.sql @@ -0,0 +1 @@ +drop table activity_tracking; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/68_activity_tracking.up.sql b/qubership-apihub-service/resources/migrations/68_activity_tracking.up.sql new file mode 100644 index 0000000..6f050fe --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/68_activity_tracking.up.sql @@ -0,0 +1,12 @@ +create table activity_tracking +( + id varchar not null, + e_type varchar not null, + data jsonb, + package_id varchar, + version varchar, + date timestamp without time zone, + user_id varchar, + + PRIMARY KEY(id) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/69_builder_notifications.down.sql b/qubership-apihub-service/resources/migrations/69_builder_notifications.down.sql new file mode 100644 index 0000000..8788b31 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/69_builder_notifications.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS builder_notifications; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/69_builder_notifications.up.sql b/qubership-apihub-service/resources/migrations/69_builder_notifications.up.sql new file mode 100644 index 0000000..9483bf1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/69_builder_notifications.up.sql @@ -0,0 +1,7 @@ +CREATE TABLE builder_notifications +( +build_id varchar NOT NULL, +severity varchar, +message varchar, +file_id integer, +FOREIGN KEY (build_id) REFERENCES build (build_id) ON DELETE Cascade ON UPDATE Cascade); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/6_content_path.down.sql b/qubership-apihub-service/resources/migrations/6_content_path.down.sql new file mode 100644 index 0000000..b154196 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/6_content_path.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content +ALTER COLUMN path SET NOT NULL; diff --git a/qubership-apihub-service/resources/migrations/6_content_path.up.sql b/qubership-apihub-service/resources/migrations/6_content_path.up.sql new file mode 100644 index 0000000..32261a1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/6_content_path.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE 
branch_draft_content +ALTER COLUMN path DROP NOT NULL; diff --git a/qubership-apihub-service/resources/migrations/70_role_redesign.down.sql b/qubership-apihub-service/resources/migrations/70_role_redesign.down.sql new file mode 100644 index 0000000..a9054b4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/70_role_redesign.down.sql @@ -0,0 +1,6 @@ +--data migration from roles array to role varchar is not possible +alter table package_member_role add column role varchar; +alter table package_member_role drop column roles; +alter table apihub_api_keys add column role varchar; +alter table apihub_api_keys drop column roles; +drop table role; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/70_role_redesign.up.sql b/qubership-apihub-service/resources/migrations/70_role_redesign.up.sql new file mode 100644 index 0000000..41909d8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/70_role_redesign.up.sql @@ -0,0 +1,36 @@ +alter table package_member_role add column roles varchar array default '{}'; + +update package_member_role set roles = array_append(roles, lower(role)::varchar); +update package_group set default_role = lower(default_role); + +alter table package_member_role drop column role; +delete from package_member_role where roles = ARRAY[]::varchar[]; +delete from package_member_role where roles = ARRAY[null]::varchar[]; + +create table role +( + id varchar not null, + role varchar not null, + rank int not null, + permissions varchar array, + read_only bool, + PRIMARY KEY(id) +); + +insert into role(id, role, rank, permissions, read_only) +values +('admin', 'Admin', 1000, ARRAY['read', 'create_and_update_package', 'delete_package', 'manage_draft_version', 'manage_release_candidate_version', 'manage_release_version', 'manage_archived_version', 'manage_deprecated_version', 'user_access_management', 'access_token_management'], true), +('release-manager', 'Release Manager', 4, ARRAY['read', 
'manage_release_version'], false), +('owner', 'Owner', 3, ARRAY['read', 'create_and_update_package', 'delete_package', 'manage_draft_version', 'manage_release_candidate_version', 'manage_release_version', 'manage_archived_version', 'manage_deprecated_version'], false), +('editor', 'Editor', 2, ARRAY['read', 'manage_draft_version', 'manage_release_candidate_version', 'manage_archived_version', 'manage_deprecated_version'], false), +('viewer', 'Viewer', 1, ARRAY['read'], true), +('none', 'None', 0, ARRAY[]::varchar[], true); + +alter table apihub_api_keys add column roles varchar array default '{}'; +update apihub_api_keys set roles = array_append(roles, lower(role)::varchar); + +alter table apihub_api_keys drop column role; +update apihub_api_keys +set roles = ARRAY['admin']::varchar[] +where roles = ARRAY[]::varchar[] +or roles = ARRAY[null]::varchar[]; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/71_builder_notifications_fix.up.sql b/qubership-apihub-service/resources/migrations/71_builder_notifications_fix.up.sql new file mode 100644 index 0000000..b7c61c4 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/71_builder_notifications_fix.up.sql @@ -0,0 +1,2 @@ +truncate table builder_notifications; +alter table builder_notifications alter column file_id type varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.down.sql b/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.down.sql new file mode 100644 index 0000000..a3379b5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.down.sql @@ -0,0 +1,3 @@ +update role +set permissions = ARRAY['read', 'manage_draft_version', 'manage_release_candidate_version', 'manage_archived_version', 'manage_deprecated_version'] +where id = 'editor'; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.up.sql b/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.up.sql new file mode 100644 index 0000000..7748707 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/72_old_editor_role_permissions.up.sql @@ -0,0 +1,3 @@ +update role +set permissions = ARRAY['read', 'manage_draft_version', 'manage_release_version', 'manage_archived_version', 'manage_deprecated_version'] +where id = 'editor'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/73_delete_sources.up.sql b/qubership-apihub-service/resources/migrations/73_delete_sources.up.sql new file mode 100644 index 0000000..5fa3e0f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/73_delete_sources.up.sql @@ -0,0 +1,2 @@ +truncate table published_sources_data cascade; +truncate table published_sources cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/74_sources_deduplication.down.sql b/qubership-apihub-service/resources/migrations/74_sources_deduplication.down.sql new file mode 100644 index 0000000..832845f --- /dev/null +++ b/qubership-apihub-service/resources/migrations/74_sources_deduplication.down.sql @@ -0,0 +1,2 @@ +alter table published_sources drop column config; +alter table published_sources drop column metadata; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/74_sources_deduplication.up.sql b/qubership-apihub-service/resources/migrations/74_sources_deduplication.up.sql new file mode 100644 index 0000000..4e347e6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/74_sources_deduplication.up.sql @@ -0,0 +1,5 @@ +truncate table published_sources_data cascade; +truncate table published_sources cascade; + +alter table published_sources add column config bytea; +alter table published_sources add column metadata bytea; \ No newline at end of file 
diff --git a/qubership-apihub-service/resources/migrations/75_nullable_sources.down.sql b/qubership-apihub-service/resources/migrations/75_nullable_sources.down.sql new file mode 100644 index 0000000..59f0210 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/75_nullable_sources.down.sql @@ -0,0 +1,2 @@ +delete from published_sources_data where data is null; +alter table published_sources_data alter column data set not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/75_nullable_sources.up.sql b/qubership-apihub-service/resources/migrations/75_nullable_sources.up.sql new file mode 100644 index 0000000..7ba021e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/75_nullable_sources.up.sql @@ -0,0 +1 @@ +alter table published_sources_data alter column data drop not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/76_unbind_services.up.sql b/qubership-apihub-service/resources/migrations/76_unbind_services.up.sql new file mode 100644 index 0000000..77fbb21 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/76_unbind_services.up.sql @@ -0,0 +1,2 @@ +delete from package_service where package_id in (select id from package_group where deleted_at is not null and service_name is not null); +update package_group set service_name = null where deleted_at is not null and service_name is not null; diff --git a/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.down.sql b/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.down.sql new file mode 100644 index 0000000..39d78cf --- /dev/null +++ b/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.down.sql @@ -0,0 +1,5 @@ +update operation +set type = 'Rest' where type = 'rest'; + +update operation +set type = 'GraphQL' where type = 'graphql'; \ No newline at end of file diff --git 
a/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.up.sql b/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.up.sql new file mode 100644 index 0000000..29f4956 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/77_operation_type_lower_case.up.sql @@ -0,0 +1,5 @@ +update operation +set type = 'rest' where type = 'Rest'; + +update operation +set type = 'graphql' where type = 'GraphQL'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/78_version_deleted_by.down.sql b/qubership-apihub-service/resources/migrations/78_version_deleted_by.down.sql new file mode 100644 index 0000000..1309245 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/78_version_deleted_by.down.sql @@ -0,0 +1 @@ +alter table published_version drop column deleted_by; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/78_version_deleted_by.up.sql b/qubership-apihub-service/resources/migrations/78_version_deleted_by.up.sql new file mode 100644 index 0000000..76c5971 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/78_version_deleted_by.up.sql @@ -0,0 +1,3 @@ +alter table published_version add column deleted_by varchar default null; + +update published_version set deleted_by = 'unknown' where deleted_at is not null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/79_parent_package_names_func.down.sql b/qubership-apihub-service/resources/migrations/79_parent_package_names_func.down.sql new file mode 100644 index 0000000..e7174ab --- /dev/null +++ b/qubership-apihub-service/resources/migrations/79_parent_package_names_func.down.sql @@ -0,0 +1 @@ +drop function if exists parent_package_names; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/79_parent_package_names_func.up.sql b/qubership-apihub-service/resources/migrations/79_parent_package_names_func.up.sql new 
file mode 100644 index 0000000..8295c5d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/79_parent_package_names_func.up.sql @@ -0,0 +1,31 @@ +create or replace function parent_package_names(varchar) +returns varchar[] language plpgsql +as ' +declare +split varchar[] := string_to_array($1, ''.'')::varchar[]; +parent_ids varchar[]; +parent_names varchar[]; +begin + +if coalesce(array_length(split, 1), 0) <= 1 then + return ARRAY[]::varchar[]; +end if; + +parent_ids = parent_ids || split[1]; + +for i in 2..(array_length(split, 1) - 1) +loop + parent_ids = parent_ids || (parent_ids[i-1] ||''.''|| split[i]); +end loop; + +execute '' +select array_agg(name) from ( + select name from package_group + join unnest($1) with ordinality t(id, ord) using (id) --sort by parent_ids array + order by t.ord) n'' +into parent_names +using parent_ids; + +return parent_names; + +end;'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/7_apihub_api_keys.down.sql b/qubership-apihub-service/resources/migrations/7_apihub_api_keys.down.sql new file mode 100644 index 0000000..165ff46 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/7_apihub_api_keys.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS apihub_api_keys CASCADE; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/7_apihub_api_keys.up.sql b/qubership-apihub-service/resources/migrations/7_apihub_api_keys.up.sql new file mode 100644 index 0000000..27f611c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/7_apihub_api_keys.up.sql @@ -0,0 +1,16 @@ +CREATE TABLE apihub_api_keys +( + id varchar PRIMARY KEY, + project_id varchar NOT NULL, + name varchar NOT NULL, + created_by varchar NOT NULL, + created_at timestamp without time zone NOT NULL, + deleted_by varchar NULL, + deleted_at timestamp without time zone NULL, + api_key varchar NOT NULL +) +; + +ALTER TABLE apihub_api_keys ADD CONSTRAINT "FK_project" + FOREIGN KEY 
(project_id) REFERENCES project (id) ON DELETE Cascade ON UPDATE Cascade +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/80_at_table_remove_version.down.sql b/qubership-apihub-service/resources/migrations/80_at_table_remove_version.down.sql new file mode 100644 index 0000000..ab90d00 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/80_at_table_remove_version.down.sql @@ -0,0 +1,2 @@ +alter table activity_tracking + add version varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/80_at_table_remove_version.up.sql b/qubership-apihub-service/resources/migrations/80_at_table_remove_version.up.sql new file mode 100644 index 0000000..f18365d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/80_at_table_remove_version.up.sql @@ -0,0 +1,2 @@ +alter table activity_tracking + drop column version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.down.sql b/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.down.sql new file mode 100644 index 0000000..2b55572 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.down.sql @@ -0,0 +1,4 @@ +alter table branch_draft_reference drop constraint "PK_branch_draft_reference"; +alter table branch_draft_reference add column relation_type varchar not null default 'depend'; +alter table branch_draft_reference add constraint "PK_branch_draft_reference" + primary key (branch_name,project_id, reference_package_id, reference_version, relation_type); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.up.sql b/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.up.sql new file mode 100644 index 0000000..2009645 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/81_remove_ref_relation_type.up.sql @@ -0,0 
+1,4 @@ +alter table branch_draft_reference drop constraint "PK_branch_draft_reference"; +alter table branch_draft_reference drop column relation_type; +alter table branch_draft_reference add constraint "PK_branch_draft_reference" + primary key (branch_name,project_id, reference_package_id, reference_version); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/82_frontend_version_delete.down.sql b/qubership-apihub-service/resources/migrations/82_frontend_version_delete.down.sql new file mode 100644 index 0000000..8d5d22e --- /dev/null +++ b/qubership-apihub-service/resources/migrations/82_frontend_version_delete.down.sql @@ -0,0 +1 @@ +alter table agent add column if not exists frontend_version varchar; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/82_frontend_version_delete.up.sql b/qubership-apihub-service/resources/migrations/82_frontend_version_delete.up.sql new file mode 100644 index 0000000..e94f5b5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/82_frontend_version_delete.up.sql @@ -0,0 +1 @@ +alter table agent drop column if exists frontend_version; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.down.sql b/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.down.sql new file mode 100644 index 0000000..e4ddb52 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.down.sql @@ -0,0 +1 @@ +drop table build_cleanup_run; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.up.sql b/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.up.sql new file mode 100644 index 0000000..be960f1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/83_cleaning_job_lock.up.sql @@ -0,0 +1 @@ +create table build_cleanup_run(run_id integer primary key, scheduled_at timestamp, deleted_rows integer); 
\ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/84_migrations_table.up.sql b/qubership-apihub-service/resources/migrations/84_migrations_table.up.sql new file mode 100644 index 0000000..3de2f87 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/84_migrations_table.up.sql @@ -0,0 +1,9 @@ +create table if not exists stored_schema_migration +( + num integer not null, + up_hash varchar not null, + sql_up varchar not null, + down_hash varchar null, + sql_down varchar null, + PRIMARY KEY(num) +); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/85_excludeFromSearch.down.sql b/qubership-apihub-service/resources/migrations/85_excludeFromSearch.down.sql new file mode 100644 index 0000000..d29ce10 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/85_excludeFromSearch.down.sql @@ -0,0 +1 @@ +alter table package_group drop column if exists exclude_from_search; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/85_excludeFromSearch.up.sql b/qubership-apihub-service/resources/migrations/85_excludeFromSearch.up.sql new file mode 100644 index 0000000..029701c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/85_excludeFromSearch.up.sql @@ -0,0 +1 @@ +alter table package_group add column if not exists exclude_from_search bool default false; diff --git a/qubership-apihub-service/resources/migrations/86_grouping_prefix.down.sql b/qubership-apihub-service/resources/migrations/86_grouping_prefix.down.sql new file mode 100644 index 0000000..2f6bc9b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/86_grouping_prefix.down.sql @@ -0,0 +1,3 @@ +drop table operations_group; + +alter table package_group drop column rest_grouping_prefix; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/86_grouping_prefix.up.sql 
b/qubership-apihub-service/resources/migrations/86_grouping_prefix.up.sql new file mode 100644 index 0000000..0c690a8 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/86_grouping_prefix.up.sql @@ -0,0 +1,16 @@ +create table operations_group +( + package_id varchar not null, + version varchar not null, + revision integer not null, + group_name varchar not null, + api_type varchar not null, + operation_ids varchar array null, + autogenerated boolean not null, + PRIMARY KEY(package_id, version, revision, group_name, api_type) +); + +ALTER TABLE operations_group ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (package_id, version, revision) REFERENCES published_version (package_id, version, revision) ON DELETE Cascade ON UPDATE Cascade; + +alter table package_group add column rest_grouping_prefix varchar default null; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/87_comparison_table.down.sql b/qubership-apihub-service/resources/migrations/87_comparison_table.down.sql new file mode 100644 index 0000000..1379b1d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/87_comparison_table.down.sql @@ -0,0 +1,9 @@ +alter table operation_comparison rename to changed_operation; + +alter table changed_operation drop constraint "FK_version_comparison"; + +alter table changed_operation drop column comparison_id; + +drop table version_comparison; + +truncate table changed_operation; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/87_comparison_table.up.sql b/qubership-apihub-service/resources/migrations/87_comparison_table.up.sql new file mode 100644 index 0000000..eec9b94 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/87_comparison_table.up.sql @@ -0,0 +1,26 @@ +create table version_comparison +( + package_id varchar not null, + version varchar not null, + revision integer not null, + previous_package_id varchar not null, + previous_version varchar 
not null, + previous_revision integer not null, + comparison_id varchar not null, + operation_types jsonb[] null, + refs varchar[] null, + open_count bigint not null, + last_active timestamp without time zone not null, + no_content boolean not null, + PRIMARY KEY(package_id, version, revision, previous_package_id, previous_version, previous_revision), + UNIQUE(comparison_id) +); + +truncate table changed_operation; + +alter table changed_operation RENAME TO operation_comparison; + +alter table operation_comparison add column comparison_id varchar; + +ALTER TABLE operation_comparison ADD CONSTRAINT "FK_version_comparison" + FOREIGN KEY (comparison_id) REFERENCES version_comparison (comparison_id) ON DELETE Cascade ON UPDATE Cascade; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/88_refs_redesign.down.sql b/qubership-apihub-service/resources/migrations/88_refs_redesign.down.sql new file mode 100644 index 0000000..32092f6 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/88_refs_redesign.down.sql @@ -0,0 +1,16 @@ +delete from published_version_reference where parent_reference_id != ''; + +alter table published_version_reference +drop column reference_revision, +drop column parent_reference_id, +drop column parent_reference_version, +drop column parent_reference_revision; + +ALTER TABLE published_version_reference DROP CONSTRAINT if exists "PK_published_version_reference"; +ALTER TABLE published_version_reference ADD CONSTRAINT "PK_published_version_reference" + PRIMARY KEY ( + package_id, + version, + revision, + reference_id, + reference_version); \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/88_refs_redesign.up.sql b/qubership-apihub-service/resources/migrations/88_refs_redesign.up.sql new file mode 100644 index 0000000..7a9d187 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/88_refs_redesign.up.sql @@ -0,0 +1,58 @@ +alter table 
published_version_reference +add column reference_revision integer not null default 0, +add column parent_reference_id varchar not null default '', +add column parent_reference_version varchar not null default '', +add column parent_reference_revision integer not null default 0; + +ALTER TABLE published_version_reference DROP CONSTRAINT if exists "PK_published_version_reference"; +ALTER TABLE published_version_reference ADD CONSTRAINT "PK_published_version_reference" + PRIMARY KEY ( + package_id, + version, + revision, + reference_id, + reference_version, + reference_revision, + parent_reference_id, + parent_reference_version, + parent_reference_revision) + ; + +--set latest revisions for all references +update published_version_reference r set reference_revision = maxrev.revision +from +( + select package_id, version, max(revision) as revision from published_version + group by package_id, version +) maxrev +where maxrev.package_id = r.reference_id +and maxrev.version = r.reference_version; + +--calculate references tree and store it in a flat way preserving parent link +insert into published_version_reference +with recursive rec as ( + select 0 as depth, s.package_id, s.version, s.revision, ''::varchar as parent_id, ''::varchar as parent_version, 0 as parent_revision, + s.package_id root_package_id, s.version root_version, s.revision root_revision + from published_version_reference s + inner join published_version_reference t + on s.package_id = t.package_id + and s.version = t.version + and s.revision = t.revision + where s.parent_reference_id = '' + union + select rec.depth+1 as depth, s.reference_id as package_id, s.reference_version as version, s.revision, + rec.package_id as parent_id, rec.version as parent_version, rec.revision as parent_revision, + rec.root_package_id root_package_id, rec.root_version root_version, rec.root_revision root_revision + from published_version_reference s + inner join rec + on rec.package_id = s.package_id + and rec.version = 
s.version + and rec.revision = s.revision + where s.parent_reference_id = '' +) +select r.root_package_id as package_id, r.root_version as version, r.root_revision as revision, +r.package_id as reference_id, r.version as reference_version, r.revision as reference_revision, +r.parent_id as parent_reference_id, r.parent_version as parent_reference_version, r.parent_revision as parent_reference_revision +from rec r +where r.depth > 0 +and not (r.parent_id = r.root_package_id and r.parent_version = r.root_version and r.parent_revision = r.root_revision); diff --git a/qubership-apihub-service/resources/migrations/89_deprecated_operation.down.sql b/qubership-apihub-service/resources/migrations/89_deprecated_operation.down.sql new file mode 100644 index 0000000..f7eef94 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/89_deprecated_operation.down.sql @@ -0,0 +1,3 @@ +alter table operation drop column if exists deprecated_info; +alter table operation drop column if exists deprecated_items; +alter table operation drop column if exists previous_release_versions; diff --git a/qubership-apihub-service/resources/migrations/89_deprecated_operation.up.sql b/qubership-apihub-service/resources/migrations/89_deprecated_operation.up.sql new file mode 100644 index 0000000..7bb80d3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/89_deprecated_operation.up.sql @@ -0,0 +1,3 @@ +alter table operation add column if not exists deprecated_info jsonb; +alter table operation add column if not exists deprecated_items jsonb[]; +alter table operation add column if not exists previous_release_versions varchar[]; diff --git a/qubership-apihub-service/resources/migrations/8_draft_publish_flag.down.sql b/qubership-apihub-service/resources/migrations/8_draft_publish_flag.down.sql new file mode 100644 index 0000000..eb19ce3 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/8_draft_publish_flag.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE branch_draft_content 
+DROP COLUMN publish; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/8_draft_publish_flag.up.sql b/qubership-apihub-service/resources/migrations/8_draft_publish_flag.up.sql new file mode 100644 index 0000000..3d685e1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/8_draft_publish_flag.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE branch_draft_content + ADD COLUMN publish boolean; + +UPDATE branch_draft_content + SET publish = true; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/90_split_favorites.down.sql b/qubership-apihub-service/resources/migrations/90_split_favorites.down.sql new file mode 100644 index 0000000..94cba0d --- /dev/null +++ b/qubership-apihub-service/resources/migrations/90_split_favorites.down.sql @@ -0,0 +1,19 @@ +ALTER TABLE favorite_projects DROP CONSTRAINT "PK_favorite_projects"; + +CREATE TABLE favorites +( + user_id varchar NOT NULL, + id varchar NOT NULL +); + +ALTER TABLE favorites ADD CONSTRAINT "PK_favorite_projects" + PRIMARY KEY (user_id, id); + +INSERT INTO favorites +(SELECT user_id, project_id from favorite_projects) ON CONFLICT DO NOTHING; + +INSERT INTO favorites +(SELECT user_id, package_id from favorite_packages) ON CONFLICT DO NOTHING; + +DROP TABLE favorite_projects; +DROP TABLE favorite_packages; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/90_split_favorites.up.sql b/qubership-apihub-service/resources/migrations/90_split_favorites.up.sql new file mode 100644 index 0000000..165243a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/90_split_favorites.up.sql @@ -0,0 +1,51 @@ +ALTER TABLE favorites DROP CONSTRAINT "PK_favorite_projects"; + +delete from favorites f +where f.user_id not in (select ud.user_id from user_data ud); + +CREATE TABLE favorite_projects +( + user_id varchar NOT NULL, + project_id varchar NOT NULL +); + +ALTER TABLE favorite_projects ADD CONSTRAINT 
"PK_favorite_projects" + PRIMARY KEY (user_id, project_id); + +ALTER TABLE favorite_projects ADD CONSTRAINT "FK_favorite_projects_project" + FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE; + +ALTER TABLE favorite_projects ADD CONSTRAINT "FK_favorite_projects_user_data" + FOREIGN KEY (user_id) REFERENCES user_data(user_id) ON DELETE CASCADE; + + +CREATE TABLE favorite_packages +( + user_id varchar NOT NULL, + package_id varchar NOT NULL +); + +ALTER TABLE favorite_packages ADD CONSTRAINT "PK_favorite_packages" + PRIMARY KEY (user_id, package_id); + +ALTER TABLE favorite_packages ADD CONSTRAINT "FK_favorite_packages_package_group" + FOREIGN KEY (package_id) REFERENCES package_group(id) ON DELETE CASCADE; + +ALTER TABLE favorite_packages ADD CONSTRAINT "FK_favorite_packages_user_data" + FOREIGN KEY (user_id) REFERENCES user_data(user_id) ON DELETE CASCADE; + +INSERT INTO favorite_projects +( + select f.* from favorites f + inner join project as p + on p.id = f.id +) ON CONFLICT DO NOTHING; + +INSERT INTO favorite_packages +( + select f.* from favorites f + inner join package_group as p + on p.id = f.id +) ON CONFLICT DO NOTHING; + +DROP TABLE favorites; diff --git a/qubership-apihub-service/resources/migrations/91_add_package_id_fks.down.sql b/qubership-apihub-service/resources/migrations/91_add_package_id_fks.down.sql new file mode 100644 index 0000000..752ac3a --- /dev/null +++ b/qubership-apihub-service/resources/migrations/91_add_package_id_fks.down.sql @@ -0,0 +1,21 @@ +alter table build drop constraint if exists build_package_group_id_fk; + +alter table published_data drop constraint if exists published_data_package_group_id_fk; + +alter table published_sources_data drop constraint if exists published_sources_data_package_group_id_fk ; + +alter table shared_url_info drop constraint if exists shared_url_info_package_group_id_fk; + +alter table published_version_open_count drop constraint if exists 
published_version_open_count_package_group_id_fk; + +alter table activity_tracking drop constraint if exists activity_tracking_package_group_id_fk; + +alter table public.migrated_version drop constraint if exists migrated_version_package_group_id_fk; + +alter table published_document_open_count drop constraint if exists published_document_open_count_package_group_id_fk; + +alter table operation_comparison drop constraint if exists operation_comparison_package_group_id_fk; + +alter table ts_published_data_errors drop constraint if exists ts_published_data_errors_package_group_id_fk; + +alter table ts_published_data_custom_split drop constraint if exists ts_published_data_custom_split_package_group_id_fk; diff --git a/qubership-apihub-service/resources/migrations/91_add_package_id_fks.up.sql b/qubership-apihub-service/resources/migrations/91_add_package_id_fks.up.sql new file mode 100644 index 0000000..4722895 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/91_add_package_id_fks.up.sql @@ -0,0 +1,54 @@ +alter table build + add constraint build_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table published_data + add constraint published_data_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table published_sources_data + add constraint published_sources_data_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table shared_url_info + add constraint shared_url_info_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table published_version_open_count + add constraint published_version_open_count_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table activity_tracking + add constraint 
activity_tracking_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table migrated_version + add constraint migrated_version_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table published_document_open_count + add constraint published_document_open_count_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table operation_comparison + add constraint operation_comparison_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table ts_published_data_errors + add constraint ts_published_data_errors_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; + +alter table ts_published_data_custom_split + add constraint ts_published_data_custom_split_package_group_id_fk + foreign key (package_id) references package_group + on update cascade on delete cascade; diff --git a/qubership-apihub-service/resources/migrations/92_excluded_refs.down.sql b/qubership-apihub-service/resources/migrations/92_excluded_refs.down.sql new file mode 100644 index 0000000..4bd742b --- /dev/null +++ b/qubership-apihub-service/resources/migrations/92_excluded_refs.down.sql @@ -0,0 +1 @@ +alter table published_version_reference drop column excluded; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/92_excluded_refs.up.sql b/qubership-apihub-service/resources/migrations/92_excluded_refs.up.sql new file mode 100644 index 0000000..ec9880c --- /dev/null +++ b/qubership-apihub-service/resources/migrations/92_excluded_refs.up.sql @@ -0,0 +1,22 @@ +alter table published_version_reference add column excluded boolean default false; + +update published_version_reference r set excluded = true +from ( + select + distinct on (package_id, version, 
revision, reference_id) + package_id, version, revision, reference_id, reference_version, reference_revision, parent_reference_id, parent_reference_version, parent_reference_revision + from published_version_reference + order by package_id, version, revision, reference_id, reference_version desc, reference_revision desc +) p +where r.package_id = p.package_id +and r.version = p.version +and r.revision = p.revision +and r.reference_id = p.reference_id + +and +(r.reference_version != p.reference_version +or r.reference_revision != p.reference_revision +or r.parent_reference_id != p.parent_reference_id +or r.parent_reference_version != p.parent_reference_version +or r.parent_reference_revision != p.parent_reference_revision) +; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/93_operation_comparison_fk.up.sql b/qubership-apihub-service/resources/migrations/93_operation_comparison_fk.up.sql new file mode 100644 index 0000000..647e5dc --- /dev/null +++ b/qubership-apihub-service/resources/migrations/93_operation_comparison_fk.up.sql @@ -0,0 +1 @@ +alter table operation_comparison drop constraint if exists operation_comparison_package_group_id_fk; diff --git a/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.down.sql b/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.down.sql new file mode 100644 index 0000000..17aa3fe --- /dev/null +++ b/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.down.sql @@ -0,0 +1,17 @@ +drop table operation_group cascade; +drop table grouped_operation; + +create table operations_group +( + package_id varchar not null, + version varchar not null, + revision integer not null, + group_name varchar not null, + api_type varchar not null, + operation_ids varchar array null, + autogenerated boolean not null, + PRIMARY KEY(package_id, version, revision, group_name, api_type) +); + +ALTER TABLE operations_group ADD CONSTRAINT 
"FK_published_version" + FOREIGN KEY (package_id, version, revision) REFERENCES published_version (package_id, version, revision) ON DELETE Cascade ON UPDATE Cascade; diff --git a/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.up.sql b/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.up.sql new file mode 100644 index 0000000..05df7fb --- /dev/null +++ b/qubership-apihub-service/resources/migrations/94_manual_operation_grouping.up.sql @@ -0,0 +1,33 @@ +drop table operations_group; + +create table operation_group +( + package_id varchar not null, + version varchar not null, + revision integer not null, + api_type varchar not null, + group_name varchar not null, + autogenerated boolean not null, + group_id varchar not null, + description varchar null, + PRIMARY KEY(package_id, version, revision, api_type, group_name), + UNIQUE(group_id) +); + +create table grouped_operation +( + group_id varchar not null, + package_id varchar not null, + version varchar not null, + revision integer not null, + operation_id varchar not null +); + +ALTER TABLE grouped_operation ADD CONSTRAINT "FK_operation_group" + FOREIGN KEY (group_id) REFERENCES operation_group (group_id) ON DELETE Cascade ON UPDATE Cascade; + +ALTER TABLE grouped_operation ADD CONSTRAINT "FK_operation" + FOREIGN KEY (package_id, version, revision, operation_id) REFERENCES operation (package_id, version, revision, operation_id) ON DELETE Cascade ON UPDATE Cascade; + +ALTER TABLE operation_group ADD CONSTRAINT "FK_published_version" + FOREIGN KEY (package_id, version, revision) REFERENCES published_version (package_id, version, revision) ON DELETE Cascade ON UPDATE Cascade; diff --git a/qubership-apihub-service/resources/migrations/95_src_fix.down.sql b/qubership-apihub-service/resources/migrations/95_src_fix.down.sql new file mode 100644 index 0000000..af2e2ce --- /dev/null +++ b/qubership-apihub-service/resources/migrations/95_src_fix.down.sql @@ -0,0 +1,22 @@ 
+alter table published_sources DROP CONSTRAINT IF EXISTS published_sources_published_sources_archives_checksum_fk; + +alter table published_sources drop column archive_checksum; + +drop table published_sources_archives; + +delete from published_sources +where (package_id, checksum) in +( + select package_id, checksum from published_sources + except + select package_id, checksum from published_sources_data +); + +delete from published_sources where checksum is null; + +alter table published_sources + add constraint "FK_published_sources_data" + foreign key (checksum, package_id) references published_sources_data; + +alter table published_sources + alter column checksum set not null; diff --git a/qubership-apihub-service/resources/migrations/95_src_fix.up.sql b/qubership-apihub-service/resources/migrations/95_src_fix.up.sql new file mode 100644 index 0000000..c27f1b1 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/95_src_fix.up.sql @@ -0,0 +1,17 @@ +create table published_sources_archives +( + checksum varchar + constraint published_sources_archives_pk + primary key, + data bytea +); + +alter table published_sources + add archive_checksum varchar; + +alter table published_sources + drop constraint "FK_published_sources_data"; + +alter table published_sources + alter column checksum drop not null; + diff --git a/qubership-apihub-service/resources/migrations/96_latest_revision_func.down.sql b/qubership-apihub-service/resources/migrations/96_latest_revision_func.down.sql new file mode 100644 index 0000000..9ddaf79 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/96_latest_revision_func.down.sql @@ -0,0 +1 @@ +drop function if exists get_latest_revision \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/96_latest_revision_func.up.sql b/qubership-apihub-service/resources/migrations/96_latest_revision_func.up.sql new file mode 100644 index 0000000..f7818bb --- /dev/null +++ 
b/qubership-apihub-service/resources/migrations/96_latest_revision_func.up.sql @@ -0,0 +1,16 @@ +create or replace function get_latest_revision(package_id varchar, version varchar) + returns integer language plpgsql +as ' + declare + latest_revision integer; + begin + execute '' + select max(revision) + from published_version + where package_id = $1 and version = $2 and deleted_at is null;'' + into latest_revision + using package_id,version; + if latest_revision is null then return 0; + end if; + return latest_revision; + end;'; diff --git a/qubership-apihub-service/resources/migrations/97_activity_event_revision.down.sql b/qubership-apihub-service/resources/migrations/97_activity_event_revision.down.sql new file mode 100644 index 0000000..771ee91 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/97_activity_event_revision.down.sql @@ -0,0 +1,3 @@ +update activity_tracking set data = data::jsonb - 'revision' where e_type = 'publish_new_version'; +update activity_tracking set data = data::jsonb - 'revision' where e_type = 'patch_version_meta'; +update activity_tracking set data = data::jsonb - 'revision' where e_type = 'delete_version'; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/97_activity_event_revision.up.sql b/qubership-apihub-service/resources/migrations/97_activity_event_revision.up.sql new file mode 100644 index 0000000..45569f5 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/97_activity_event_revision.up.sql @@ -0,0 +1,7 @@ +update activity_tracking set data = jsonb_set(data, '{revision}',to_jsonb(1)) where e_type = 'publish_new_version'; + +update activity_tracking as at set data = jsonb_set(data, '{revision}',to_jsonb((select revision from published_version as pv where pv.version = at.data #>> '{version}' and pv.package_id = at.package_id order by revision desc limit 1))) +where e_type = 'patch_version_meta'; + +update activity_tracking as at set data = jsonb_set(data, 
'{revision}',to_jsonb((select revision from published_version as pv where pv.version = at.data #>> '{version}' and pv.package_id = at.package_id order by revision desc limit 1))) +where e_type = 'delete_version'; diff --git a/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.down.sql b/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.down.sql new file mode 100644 index 0000000..1ca1031 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.down.sql @@ -0,0 +1,2 @@ +delete from grouped_operation where deleted = true; +alter table grouped_operation drop column deleted; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.up.sql b/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.up.sql new file mode 100644 index 0000000..93f0072 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/98_deleted_grouped_operations.up.sql @@ -0,0 +1 @@ +alter table grouped_operation add column deleted boolean default false; diff --git a/qubership-apihub-service/resources/migrations/99_gql_search_scopes.down.sql b/qubership-apihub-service/resources/migrations/99_gql_search_scopes.down.sql new file mode 100644 index 0000000..f5bf239 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/99_gql_search_scopes.down.sql @@ -0,0 +1,14 @@ +drop index ts_graphql_operation_data_idx; +drop table ts_graphql_operation_data; + +alter table ts_rest_operation_data add column scope_all tsvector; + +drop index ts_operation_data_idx; + +update ts_rest_operation_data r_od set scope_all = (select scope_all from ts_operation_data od where od.data_hash = r_od.data_hash); + +drop table ts_operation_data; + +alter table ts_rest_operation_data rename constraint pk_ts_rest_operation_data to pk_ts_operation_data; +alter index ts_rest_operation_data_idx rename to ts_operation_data_idx; +alter table 
ts_rest_operation_data rename to ts_operation_data; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/99_gql_search_scopes.up.sql b/qubership-apihub-service/resources/migrations/99_gql_search_scopes.up.sql new file mode 100644 index 0000000..1367d02 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/99_gql_search_scopes.up.sql @@ -0,0 +1,32 @@ +alter table ts_operation_data rename to ts_rest_operation_data; +alter index ts_operation_data_idx rename to ts_rest_operation_data_idx; +alter table ts_rest_operation_data rename constraint pk_ts_operation_data to pk_ts_rest_operation_data; + +CREATE TABLE ts_operation_data( + data_hash varchar NOT NULL, + scope_all tsvector, + CONSTRAINT pk_ts_operation_data PRIMARY KEY(data_hash) +); + +insert into ts_operation_data +select data_hash, scope_all from ts_rest_operation_data where scope_all is not null; + +CREATE INDEX ts_operation_data_idx +ON ts_operation_data +USING gin(scope_all) +with (fastupdate = true); + +alter table ts_rest_operation_data drop column scope_all; + +CREATE TABLE ts_graphql_operation_data( + data_hash varchar NOT NULL, + scope_argument tsvector, + scope_property tsvector, + scope_annotation tsvector, + CONSTRAINT pk_ts_graphql_operation_data PRIMARY KEY(data_hash) +); + +CREATE INDEX ts_graphql_operation_data_idx +ON ts_graphql_operation_data +USING gin(scope_argument, scope_property, scope_annotation) +with (fastupdate = true); diff --git a/qubership-apihub-service/resources/migrations/9_published_metadata.down.sql b/qubership-apihub-service/resources/migrations/9_published_metadata.down.sql new file mode 100644 index 0000000..8358ec7 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/9_published_metadata.down.sql @@ -0,0 +1,25 @@ +ALTER TABLE published_version + ADD COLUMN branch_name varchar; + +update published_version t +set branch_name = jsonb_extract_path_text(s.metadata, 'branch_name') +from published_version s +where 
t.project_id = s.project_id + and t.version = s.version + and t.revision = s.revision; + +ALTER TABLE published_version + DROP COLUMN metadata; + +ALTER TABLE published_data + ADD COLUMN commit_id varchar, + ADD COLUMN commit_date timestamp without time zone; + +update published_data t +set commit_id = jsonb_extract_path_text(s.metadata, 'commit_id') +from published_data s +where t.project_id = s.project_id + and t.checksum = s.checksum; + +ALTER TABLE published_data + DROP COLUMN metadata; \ No newline at end of file diff --git a/qubership-apihub-service/resources/migrations/9_published_metadata.up.sql b/qubership-apihub-service/resources/migrations/9_published_metadata.up.sql new file mode 100644 index 0000000..aab2e27 --- /dev/null +++ b/qubership-apihub-service/resources/migrations/9_published_metadata.up.sql @@ -0,0 +1,26 @@ +ALTER TABLE published_version + ADD COLUMN metadata jsonb; + +update published_version t +set metadata = jsonb_build_object('branch_name', s.branch_name) +from published_version s +where t.project_id = s.project_id + and t.version = s.version + and t.revision = s.revision; + +ALTER TABLE published_version + DROP COLUMN branch_name; + + +ALTER TABLE published_data + ADD COLUMN metadata jsonb; + +update published_data t +set metadata = jsonb_build_object('commit_id', s.commit_id, 'commit_date', to_char(s.commit_date,'DD-MM-YYYY hh24:mi:ss')) +from published_data s +where t.project_id = s.project_id + and t.checksum = s.checksum; + +ALTER TABLE published_data + DROP COLUMN commit_id, + DROP COLUMN commit_date; diff --git a/qubership-apihub-service/security/ApihubApiKeyStrategy.go b/qubership-apihub-service/security/ApihubApiKeyStrategy.go new file mode 100644 index 0000000..eb216d3 --- /dev/null +++ b/qubership-apihub-service/security/ApihubApiKeyStrategy.go @@ -0,0 +1,62 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package security + +import ( + goctx "context" + "fmt" + "net/http" + + "github.com/gorilla/mux" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/shaj13/go-guardian/v2/auth" +) + +func NewApihubApiKeyStrategy(apihubApiKeyService service.ApihubApiKeyService) auth.Strategy { + return &apihubApiKeyStrategyImpl{apihubApiKeyService: apihubApiKeyService} +} + +type apihubApiKeyStrategyImpl struct { + apihubApiKeyService service.ApihubApiKeyService +} + +func (a apihubApiKeyStrategyImpl) Authenticate(ctx goctx.Context, r *http.Request) (auth.Info, error) { + apiKey := r.Header.Get("api-key") + if apiKey == "" { + return nil, fmt.Errorf("authentication failed: %v is empty", "api-key") + } + packageId := getStringParam(r, "packageId") + apiKeyRevoked, apiKeyView, err := a.apihubApiKeyService.GetApiKeyStatus(apiKey, packageId) + if err != nil { + return nil, err + } + if apiKeyView == nil { + return nil, fmt.Errorf("authentication failed: %v is not valid", "api-key") + } + if apiKeyRevoked { + return nil, fmt.Errorf("authentication failed: %v has been revoked", "api-key") + } + userExtensions := auth.Extensions{} + userExtensions.Set(context.ApikeyPackageIdExt, apiKeyView.PackageId) + userExtensions.Set(context.ApikeyRoleExt, context.MergeApikeyRoles(apiKeyView.Roles)) + return auth.NewDefaultUser(apiKeyView.Name, apiKeyView.Id, []string{}, userExtensions), nil +} + +func getStringParam(r 
*http.Request, p string) string { + params := mux.Vars(r) + return params[p] +} diff --git a/qubership-apihub-service/security/Auth.go b/qubership-apihub-service/security/Auth.go new file mode 100644 index 0000000..9737e3d --- /dev/null +++ b/qubership-apihub-service/security/Auth.go @@ -0,0 +1,175 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package security + +import ( + "crypto/rsa" + "crypto/x509" + "encoding/json" + "encoding/pem" + "fmt" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/shaj13/go-guardian/v2/auth" + "github.com/shaj13/go-guardian/v2/auth/strategies/jwt" + "github.com/shaj13/go-guardian/v2/auth/strategies/token" + "github.com/shaj13/go-guardian/v2/auth/strategies/union" + "github.com/shaj13/libcache" + _ "github.com/shaj13/libcache/fifo" + _ "github.com/shaj13/libcache/lru" + + "time" +) + +var apihubApiKeyStrategy auth.Strategy +var jwtStrategy auth.Strategy +var strategy union.Union +var keeper jwt.SecretsKeeper +var integrationService service.IntegrationsService +var 
userService service.UserService
var roleService service.RoleService
var systemInfoService service.SystemInfoService

var customJwtStrategy auth.Strategy

const CustomJwtAuthHeader = "X-Apihub-Authorization"

var publicKey []byte

const gitIntegrationExt = "gitIntegration"

// SetupGoGuardian wires up all go-guardian authentication strategies (bearer
// JWT, API key, custom-header JWT) from the RSA private key supplied by the
// system info service. Must be called once at startup before any Secure*
// middleware is used. Returns an error if the JWT private key cannot be
// decoded or is not an RSA PKCS#8 key.
func SetupGoGuardian(intService service.IntegrationsService, userServiceLocal service.UserService, roleServiceLocal service.RoleService, apiKeyService service.ApihubApiKeyService, systemService service.SystemInfoService) error {
	integrationService = intService
	userService = userServiceLocal
	roleService = roleServiceLocal
	apihubApiKeyStrategy = NewApihubApiKeyStrategy(apiKeyService)
	systemInfoService = systemService

	block, _ := pem.Decode(systemInfoService.GetJwtPrivateKey())
	// pem.Decode returns nil when no PEM block is found; the original code
	// dereferenced block.Bytes unconditionally and panicked on a bad key.
	if block == nil {
		return fmt.Errorf("can't decode jwt private key: no PEM data found")
	}
	pkcs8PrivateKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
	if err != nil {
		// the key is expected in PKCS#8 format (the message used to say "pkcs1")
		return fmt.Errorf("can't parse pkcs8 private key. Error - %s", err.Error())
	}
	privateKey, ok := pkcs8PrivateKey.(*rsa.PrivateKey)
	if !ok {
		// BUG FIX: err is always nil on this path, so the original
		// err.Error() call here panicked with a nil dereference.
		return fmt.Errorf("can't convert pkcs8 private key to rsa.PrivateKey")
	}
	publicKey = x509.MarshalPKCS1PublicKey(&privateKey.PublicKey)

	keeper = jwt.StaticSecret{
		ID:        "secret-id",
		Secret:    privateKey,
		Algorithm: jwt.RS256,
	}

	cache := libcache.LRU.New(1000)
	cache.SetTTL(time.Minute * 60)
	cache.RegisterOnExpired(func(key, _ interface{}) {
		cache.Delete(key)
	})
	jwtStrategy = jwt.New(cache, keeper)
	strategy = union.New(jwtStrategy, apihubApiKeyStrategy)
	customJwtStrategy = jwt.New(cache, keeper, token.SetParser(token.XHeaderParser(CustomJwtAuthHeader)))
	return nil
}

// UserView is the login response payload: the issued tokens plus user info.
type UserView struct {
	AccessToken string    `json:"token"`
	RenewToken  string    `json:"renewToken"`
	User        view.User `json:"user"`
}

// CreateLocalUserToken authenticates a local (HTTP Basic email/password) user
// and responds with freshly issued access and renew JWTs.
func CreateLocalUserToken(w http.ResponseWriter, r *http.Request) {
	email, password, ok := r.BasicAuth()
	if !ok {
		controller.RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusUnauthorized,
			Message: http.StatusText(http.StatusUnauthorized),
		})
		return
	}
	user, err := userService.AuthenticateUser(email, password)
	if err != nil {
		controller.RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusUnauthorized,
			Message: http.StatusText(http.StatusUnauthorized),
			Debug:   err.Error(),
		})
		return
	}
	userView, err := CreateTokenForUser(*user)
	if err != nil {
		controller.RespondWithCustomError(w, &exception.CustomError{
			Status:  http.StatusUnauthorized,
			Message: http.StatusText(http.StatusUnauthorized),
			Debug:   err.Error(),
		})
		return
	}

	response, _ := json.Marshal(userView)
	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	w.Write(response)
}

// CreateTokenForUser issues an access JWT (12h) and a renew JWT (~30 days) for
// the given user, embedding the user's system role and gitlab-integration flag
// as token extensions.
func CreateTokenForUser(dbUser view.User) (*UserView, error) {
	user := auth.NewUserInfo(dbUser.Name, dbUser.Id, []string{}, auth.Extensions{})
	accessDuration := jwt.SetExpDuration(time.Hour * 12) // should be more than one minute!
+ + status, err := integrationService.GetUserApiKeyStatus(view.GitlabIntegration, dbUser.Id) + if err != nil { + return nil, fmt.Errorf("failed to check gitlab integration status: %v", err) + } + extensions := user.GetExtensions() + + gitIntegrationExtensionValue := "false" + if status.Status == service.ApiKeyStatusPresent { + gitIntegrationExtensionValue = "true" + } + systemRole, err := roleService.GetUserSystemRole(user.GetID()) + if err != nil { + return nil, fmt.Errorf("failed to check user system role: %v", err.Error()) + } + if systemRole != "" { + extensions.Set(context.SystemRoleExt, systemRole) + } + extensions.Set(gitIntegrationExt, gitIntegrationExtensionValue) + user.SetExtensions(extensions) + + token, err := jwt.IssueAccessToken(user, keeper, accessDuration) + + if err != nil { + return nil, err + } + + renewDuration := jwt.SetExpDuration(time.Hour * 24 * 30) // approximately one month + renewToken, err := jwt.IssueAccessToken(user, keeper, renewDuration) + if err != nil { + return nil, err + } + + userView := UserView{AccessToken: token, RenewToken: renewToken, User: dbUser} + return &userView, nil +} + +func GetPublicKey() []byte { + return publicKey +} diff --git a/qubership-apihub-service/security/Middleware.go b/qubership-apihub-service/security/Middleware.go new file mode 100644 index 0000000..1e7710f --- /dev/null +++ b/qubership-apihub-service/security/Middleware.go @@ -0,0 +1,214 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package security + +import ( + "fmt" + "net/http" + "runtime/debug" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/shaj13/go-guardian/v2/auth" + "github.com/shaj13/go-guardian/v2/auth/strategies/union" + log "github.com/sirupsen/logrus" +) + +func Secure(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + _, user, err := strategy.AuthenticateRequest(r) + if err != nil { + if multiError, ok := err.(union.MultiError); ok { + for _, e := range multiError { + if customError, ok := e.(*exception.CustomError); ok { + if customError.Status == http.StatusForbidden { + log.Debugf("Authorization failed(403): %+v", err) + controller.RespondWithCustomError(w, customError) + return + } + } + } + } + log.Debugf("Authorization failed(401): %+v", err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + Debug: fmt.Sprintf("%v", err), + }) + return + } + + r = auth.RequestWithUser(user, r) + next.ServeHTTP(w, r) + } +} + +func SecureJWT(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + 
debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + user, err := jwtStrategy.Authenticate(r.Context(), r) + if err != nil { + log.Debugf("Authorization failed(401): %+v", err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + Debug: fmt.Sprintf("%v", err), + }) + return + } + + r = auth.RequestWithUser(user, r) + next.ServeHTTP(w, r) + } +} + +func SecureWebsocket(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + token := r.URL.Query().Get("token") + r.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) + _, user, err := strategy.AuthenticateRequest(r) + if err != nil { + log.Errorf("Authorization failed(401): %+v", err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + Debug: fmt.Sprintf("%v", err), + }) + return + } + //log.Debugf("User %s Authenticated", user.GetUserName()) + + r = auth.RequestWithUser(user, r) + next.ServeHTTP(w, r) + } +} + +func NoSecure(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + 
debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + next.ServeHTTP(w, r) + } +} + +func SecureAgentProxy(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + user, err := customJwtStrategy.Authenticate(r.Context(), r) + if err != nil { + log.Debugf("Authorization failed(401): %+v", err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + Debug: fmt.Sprintf("%v", err), + }) + return + } + r = auth.RequestWithUser(user, r) + next.ServeHTTP(w, r) + } +} + +func SecureProxy(next http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + defer func() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + debug.PrintStack() + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: http.StatusText(http.StatusInternalServerError), + Debug: fmt.Sprintf("%v", err), + }) + return + } + }() + user, err := customJwtStrategy.Authenticate(r.Context(), r) + if err != nil { + log.Debugf("Authorization failed(401): %+v", err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusUnauthorized, + Message: http.StatusText(http.StatusUnauthorized), + 
Debug: fmt.Sprintf("%v", err), + }) + return + } + r = auth.RequestWithUser(user, r) + r.Header.Del(CustomJwtAuthHeader) + next.ServeHTTP(w, r) + } +} diff --git a/qubership-apihub-service/security/Oauth2Controller.go b/qubership-apihub-service/security/Oauth2Controller.go new file mode 100644 index 0000000..d1dca94 --- /dev/null +++ b/qubership-apihub-service/security/Oauth2Controller.go @@ -0,0 +1,245 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package security + +import ( + "crypto/tls" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "time" + "net/url" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/go-resty/resty/v2" + log "github.com/sirupsen/logrus" +) + +type Oauth20Controller interface { + GitlabOauthCallback(w http.ResponseWriter, r *http.Request) + StartOauthProcessWithGitlab(w http.ResponseWriter, r *http.Request) +} + +func NewOauth20Controller(integrationService service.IntegrationsService, userService service.UserService, systemInfoService service.SystemInfoService) Oauth20Controller { + return &oauth20ControllerImpl{ + integrationService: integrationService, + userService: userService, + systemInfoService: systemInfoService, + clientId: systemInfoService.GetClientID(), + clientSecret: systemInfoService.GetClientSecret(), + gitlabUrl: systemInfoService.GetGitlabUrl(), + } +} + +type oauth20ControllerImpl struct { + integrationService service.IntegrationsService + userService service.UserService + systemInfoService service.SystemInfoService + clientId string + clientSecret string + gitlabUrl string +} + +type GitlabUserInfo struct { + Username string `json:"username"` + Email string `json:"email"` + Name string `json:"name"` +} + +const gitlabOauthTokenUri string = "/oauth/token" + +const gitlabOauthAuthorize string = "/oauth/authorize" +const gitlabUserUri string = "/api/v4/user" + +func (o oauth20ControllerImpl) GitlabOauthCallback(w http.ResponseWriter, r *http.Request) { + code := r.FormValue("code") + if code == "" { + log.Error("Gitlab access code is empty") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, 
+ Message: "Access code from gitlab is empty", + }) + return + } + redirectUri := r.URL.Query().Get("redirectUri") + if redirectUri == "" { + redirectUri = "/" + } else { + url, _ := url.Parse(redirectUri) + var validHost bool + for _, host := range o.systemInfoService.GetAllowedHosts() { + if strings.Contains(url.Host, host) { + validHost = true + break + } + } + if !validHost { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.HostNotAllowed, + Message: exception.HostNotAllowedMsg, + Params: map[string]interface{}{"host": redirectUri}, + }) + return + } + } + + req := makeRequest() + authRedirectUri := fmt.Sprintf("%s%s", o.systemInfoService.GetAPIHubUrl(), "/login/ncgitlab/callback?redirectUri="+redirectUri) + //todo move query parameters to body with Content-Type: application/x-www-form-urlencoded https://www.rfc-editor.org/rfc/rfc6749#section-4.1.3 + url := fmt.Sprintf("%s%s?client_id=%s&client_secret=%s&code=%s&grant_type=authorization_code&redirect_uri=%s", + o.gitlabUrl, gitlabOauthTokenUri, o.clientId, o.clientSecret, code, authRedirectUri) + + resp, err := req.Post(url) + if resp.StatusCode() == http.StatusNotFound { + log.Error("Couldn't call gitlab Oauth2.0 rest url") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Message: "Couldn't call gitlab Oauth2.0 rest url", + Debug: err.Error()}) + return + } + if err != nil || resp.StatusCode() != http.StatusOK { + log.Errorf("Failed to get access token from gitlab: status code %d %v", resp.StatusCode(), err) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: resp.StatusCode(), + Message: "Failed to get access token from gitlab", + Debug: err.Error()}) + return + } + + var gitlabOauthAccessResponse view.OAuthAccessResponse + if err := json.Unmarshal(resp.Body(), &gitlabOauthAccessResponse); err != nil { + log.Errorf("Couldn't parse JSON response from gitlab Oauth: %v", err) + 
controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Couldn't parse JSON response from gitlab Oauth", + Debug: err.Error()}) + return + } + expiresIn := view.GetTokenExpirationDate(gitlabOauthAccessResponse.ExpiresIn) + + accessToken := gitlabOauthAccessResponse.AccessToken + gitlabUser, err := getUserByToken(o.gitlabUrl, accessToken) + + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Couldn't get username from gitlab", + Debug: err.Error()}) + return + } + user, err := o.userService.GetOrCreateUserForIntegration(view.User{Id: gitlabUser.Username, Email: gitlabUser.Email, Name: gitlabUser.Name}, view.ExternalGitlabIntegration) + if err != nil { + controller.RespondWithError(w, "Failed to login via gitlab", err) + return + } + err = o.integrationService.SetOauthGitlabTokenForUser(view.GitlabIntegration, user.Id, accessToken, gitlabOauthAccessResponse.RefreshToken, expiresIn, authRedirectUri) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "failed to set oauth token for user - $user", + Debug: err.Error(), + Params: map[string]interface{}{"user": user.Id}, + }) + return + } + + userView, err := CreateTokenForUser(*user) + if err != nil { + log.Errorf("Create token for saml process has error -%s", err.Error()) + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Create token for saml process has error - $error", + Params: map[string]interface{}{"error": err.Error()}, + }) + return + } + + response, _ := json.Marshal(userView) + cookieValue := base64.StdEncoding.EncodeToString(response) + + http.SetCookie(w, &http.Cookie{ + Name: "userView", + Value: cookieValue, + MaxAge: int((time.Hour * 12).Seconds()), + Secure: true, + HttpOnly: false, + Path: "/", + }) + http.Redirect(w, 
r, redirectUri, http.StatusFound) +} + +func (o oauth20ControllerImpl) StartOauthProcessWithGitlab(w http.ResponseWriter, r *http.Request) { + redirectUri := r.URL.Query().Get("redirectUri") + if redirectUri == "" { + redirectUri = "/" + } else { + url, _ := url.Parse(redirectUri) + var validHost bool + for _, host := range o.systemInfoService.GetAllowedHosts() { + if strings.Contains(url.Host, host) { + validHost = true + break + } + } + if !validHost { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.HostNotAllowed, + Message: exception.HostNotAllowedMsg, + Params: map[string]interface{}{"host": redirectUri}, + }) + return + } + } + + fullRedirectUrl := fmt.Sprintf("%s%s?redirectUri=%s", o.systemInfoService.GetAPIHubUrl(), "/login/ncgitlab/callback", redirectUri) + http.Redirect(w, r, fmt.Sprintf("%s%s?client_id=%s&response_type=code&redirect_uri=%s", o.gitlabUrl, gitlabOauthAuthorize, o.clientId, fullRedirectUrl), http.StatusFound) +} + +func getUserByToken(gitlabUrl string, oauthToken string) (*GitlabUserInfo, error) { + req := makeRequest() + resp, err := req.Get(fmt.Sprintf("%s%s?access_token=%s", gitlabUrl, gitlabUserUri, oauthToken)) + if err != nil { + return nil, err + } + if resp.StatusCode() == http.StatusNotFound { + return nil, fmt.Errorf("couldn't call gitlab Oauth2.0 rest url - %d", resp.StatusCode()) + } + if err != nil || resp.StatusCode() != http.StatusOK { + return nil, fmt.Errorf("failed to get access token from gitlab: status code %d %v", resp.StatusCode(), err) + } + var gitlabUserInfo GitlabUserInfo + if err := json.Unmarshal(resp.Body(), &gitlabUserInfo); err != nil { + return nil, fmt.Errorf("couldn't parse JSON response from gitlab user: %s", err.Error()) + + } + return &gitlabUserInfo, nil +} + +func makeRequest() *resty.Request { + tr := http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + cl := http.Client{Transport: &tr, Timeout: time.Second * 60} + 
client := resty.NewWithClient(&cl) + req := client.R() + req.SetHeader("accept", "application/json") + return req +} diff --git a/qubership-apihub-service/security/SamlAuthController.go b/qubership-apihub-service/security/SamlAuthController.go new file mode 100644 index 0000000..cfec207 --- /dev/null +++ b/qubership-apihub-service/security/SamlAuthController.go @@ -0,0 +1,461 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package security + +import ( + "context" + "crypto/rsa" + "crypto/tls" + "crypto/x509" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/http" + "net/url" + "os" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service" + "github.com/crewjam/saml" + "github.com/crewjam/saml/samlsp" + dsig "github.com/russellhaering/goxmldsig" + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type SamlAuthController interface { + AssertionConsumerHandler(w http.ResponseWriter, r *http.Request) + StartSamlAuthentication(w http.ResponseWriter, r *http.Request) + ServeMetadata(w http.ResponseWriter, r *http.Request) + GetSystemSSOInfo(w http.ResponseWriter, r *http.Request) +} + +func NewSamlAuthController(userService service.UserService, systemInfoService service.SystemInfoService) SamlAuthController { + return &authenticationControllerImpl{ + samlInstance: createSamlInstance(systemInfoService), + userService: userService, + systemInfoService: systemInfoService, + } +} + +type SamlInstance struct { + saml *samlsp.Middleware + error error +} +type authenticationControllerImpl struct { + samlInstance SamlInstance + userService service.UserService + systemInfoService service.SystemInfoService +} + +const samlAttributeEmail string = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress" +const samlAttributeName string = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname" +const samlAttributeSurname string = "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname" +const samlAttributeUserAvatar string = "thumbnailPhoto" +const samlAttributeUserId string = "User-Principal-Name" + +func (a *authenticationControllerImpl) ServeMetadata(w 
http.ResponseWriter, r *http.Request) { + if a.samlInstance.error != nil { + log.Errorf("Cannot serveMetadata with nil samlInstanse") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlInstanceIsNull, + Message: exception.SamlInstanceIsNullMsg, + Params: map[string]interface{}{"error": a.samlInstance.error.Error()}, + }) + return + } + a.samlInstance.saml.ServeMetadata(w, r) +} + +// StartSamlAuthentication Frontend calls this endpoint to SSO login user via SAML +func (a *authenticationControllerImpl) StartSamlAuthentication(w http.ResponseWriter, r *http.Request) { + if a.samlInstance.error != nil { + log.Errorf("Cannot StartSamlAuthentication with nil samlInstance") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlInstanceIsNull, + Message: exception.SamlInstanceIsNullMsg, + Params: map[string]interface{}{"error": a.samlInstance.error.Error()}, + }) + return + } + redirectUrlStr := r.URL.Query().Get("redirectUri") + + log.Debugf("redirect url - %s", redirectUrlStr) + + redirectUrl, err := url.Parse(redirectUrlStr) + if err != nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectRedirectUrlError, + Message: exception.IncorrectRedirectUrlErrorMsg, + Params: map[string]interface{}{"error": err.Error()}, + }) + return + } + var validHost bool + for _, host := range a.systemInfoService.GetAllowedHosts() { + if strings.Contains(redirectUrl.Host, host) { + validHost = true + break + } + } + if !validHost { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.HostNotAllowed, + Message: exception.HostNotAllowedMsg, + Params: map[string]interface{}{"host": redirectUrlStr}, + }) + return + } + + //Current URL is something like /api/v2/auth/saml, and it's a dedicated login endpoint. 
+ //Frontend detects missing/bad/expired token by 401 response and goes to the endpoint itself with redirectUri as a parameter. + //But saml library is using middleware logic, i.e. it expects that client is trying to call some business endpoint and checks the security. + //SAML library stores original URL and after successful auth redirects to it. + //This is a different flow that we have. Changing r.URL to redirectUrl allows us to adapt to library's middleware flow, it will redirect to expected endpoint automatically. + r.URL = redirectUrl + + // Note that we do not use built-in session mechanism from saml lib except request tracking cookie + a.samlInstance.saml.HandleStartAuthFlow(w, r) +} + +// AssertionConsumerHandler This endpoint is called by ADFS when auth procedure is complete on it's side. ADFS posts the response here. +func (a *authenticationControllerImpl) AssertionConsumerHandler(w http.ResponseWriter, r *http.Request) { + if a.samlInstance.error != nil { + log.Errorf("Cannot run AssertionConsumerHandler with nill samlInstanse") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlInstanceIsNull, + Message: exception.SamlInstanceIsNullMsg, + Params: map[string]interface{}{"error": a.samlInstance.error.Error()}, + }) + return + } + + if err := r.ParseForm(); err != nil { + http.Error(w, fmt.Sprintf("Failed to parse ACS form: %s", err), http.StatusBadRequest) + return + } + possibleRequestIDs := []string{} + if a.samlInstance.saml.ServiceProvider.AllowIDPInitiated { + possibleRequestIDs = append(possibleRequestIDs, "") + } + trackedRequests := a.samlInstance.saml.RequestTracker.GetTrackedRequests(r) + for _, tr := range trackedRequests { + possibleRequestIDs = append(possibleRequestIDs, tr.SAMLRequestID) + } + assertion, err := a.samlInstance.saml.ServiceProvider.ParseResponse(r, possibleRequestIDs) + if err != nil { + log.Errorf("Parsing SAML response process error: %s", 
err.Error()) + var ire *saml.InvalidResponseError + if errors.As(err, &ire) { + log.Errorf("Parsing SAML response process private error: %s", ire.PrivateErr.Error()) + log.Debugf("ACS response data: %s", ire.Response) + } + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlResponseHasParsingError, + Message: exception.SamlResponseHasParsingErrorMsg, + Params: map[string]interface{}{"error": err.Error()}, + }) + return + } + if assertion == nil { + log.Errorf("Assertion from SAML response is nil") + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.AssertionIsNull, + Message: exception.AssertionIsNullMsg, + }) + return + } + + // Add Apihub auth info cookie + a.setUserViewCookie(w, assertion) + + // Extract original redirect URI from request tracking cookie + redirectURI := "/" + if trackedRequestIndex := r.Form.Get("RelayState"); trackedRequestIndex != "" { + log.Debugf("trackedRequestIndex = %s", trackedRequestIndex) + trackedRequest, err := a.samlInstance.saml.RequestTracker.GetTrackedRequest(r, trackedRequestIndex) + if err != nil { + if errors.Is(err, http.ErrNoCookie) && a.samlInstance.saml.ServiceProvider.AllowIDPInitiated { + if uri := r.Form.Get("RelayState"); uri != "" { + redirectURI = uri + log.Debugf("redirectURI is found in RelayState and updated to %s", redirectURI) + } + } + controller.RespondWithError(w, "Unable to retrieve redirect URL: failed to get tracked request", err) + return + } else { + err = a.samlInstance.saml.RequestTracker.StopTrackingRequest(w, r, trackedRequestIndex) + if err != nil { + log.Warnf("Failed to stop tracking request: %s", err) + // but it's not a showstopper, so continue processing + } + redirectURI = trackedRequest.URI + log.Debugf("redirectURI is found in trackedRequest and updated to %s", redirectURI) + } + } + + http.Redirect(w, r, redirectURI, http.StatusFound) +} + +func 
(a *authenticationControllerImpl) setUserViewCookie(w http.ResponseWriter, assertion *saml.Assertion) { + assertionAttributes := getAssertionAttributes(assertion) + + userView, err := a.getOrCreateUser(assertionAttributes) + if err != nil { + controller.RespondWithError(w, "Failed to get or create SSO user", err) + return + } + + response, _ := json.Marshal(userView) + cookieValue := base64.StdEncoding.EncodeToString(response) + + http.SetCookie(w, &http.Cookie{ + Name: "userView", + Value: cookieValue, + MaxAge: int((time.Hour * 12).Seconds()), + Secure: true, + HttpOnly: false, + Path: "/", + }) + log.Debugf("Auth user result object: %+v", userView) +} + +func getAssertionAttributes(assertion *saml.Assertion) map[string][]string { + assertionAttributes := make(map[string][]string) + for _, attributeStatement := range assertion.AttributeStatements { + for _, attr := range attributeStatement.Attributes { + claimName := attr.FriendlyName + if claimName == "" { + claimName = attr.Name + } + for _, value := range attr.Values { + assertionAttributes[claimName] = append(assertionAttributes[claimName], value.Value) + } + } + } + return assertionAttributes +} + +func (a *authenticationControllerImpl) getOrCreateUser(assertionAttributes map[string][]string) (*UserView, error) { + samlUser := view.User{} + if len(assertionAttributes[samlAttributeUserId]) != 0 { + userLogin := assertionAttributes[samlAttributeUserId][0] + if strings.Contains(userLogin, "@") { + samlUser.Id = strings.Split(assertionAttributes[samlAttributeUserId][0], "@")[0] + } else { + samlUser.Id = userLogin + } + log.Debugf("Attributes from saml response for user %s - %v", samlUser.Id, assertionAttributes) + } else { + log.Error("UserId is empty in saml response") + log.Errorf("Attributes from saml response - %v", assertionAttributes) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlResponseHaveNoUserId, + Message: exception.SamlResponseHaveNoUserIdMsg, 
+ } + } + + if len(assertionAttributes[samlAttributeName]) != 0 { + samlUser.Name = assertionAttributes[samlAttributeName][0] + } + if len(assertionAttributes[samlAttributeSurname]) != 0 { + samlUser.Name = fmt.Sprintf("%s %s", samlUser.Name, assertionAttributes[samlAttributeSurname][0]) + } + if len(assertionAttributes[samlAttributeEmail]) != 0 { + samlUser.Email = assertionAttributes[samlAttributeEmail][0] + } else { + log.Error("Email is empty in saml response") + log.Errorf("Attributes from saml response - %v", assertionAttributes) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlResponseMissingEmail, + Message: exception.SamlResponseMissingEmailMsg, + } + } + + if len(assertionAttributes[samlAttributeUserAvatar]) != 0 { + samlUser.AvatarUrl = fmt.Sprintf("/api/v2/users/%s/profile/avatar", samlUser.Id) + avatar := assertionAttributes[samlAttributeUserAvatar][0] + + decodedAvatar, err := base64.StdEncoding.DecodeString(avatar) + if err != nil { + log.Errorf("Failed to decode user avatar during SSO user login: %s", err) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.SamlResponseHasBrokenContent, + Message: exception.SamlResponseHasBrokenContentMsg, + Params: map[string]interface{}{"userId": samlUser.Id, "error": err.Error()}, + Debug: "Failed to decode user avatar", + } + } + err = a.userService.StoreUserAvatar(samlUser.Id, decodedAvatar) + if err != nil { + return nil, fmt.Errorf("failed to store user avatar: %w", err) + } + } + + user, err := a.userService.GetOrCreateUserForIntegration(samlUser, view.ExternalSamlIntegration) + if err != nil { + return nil, fmt.Errorf("failed to create user for SSO integration: %w", err) + } + userView, err := CreateTokenForUser(*user) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to create token for SSO user", + Debug: err.Error(), + } + } + return 
userView, nil +} + +func (a *authenticationControllerImpl) GetSystemSSOInfo(w http.ResponseWriter, r *http.Request) { + controller.RespondWithJson(w, http.StatusOK, + view.SystemConfigurationInfo{ + SSOIntegrationEnabled: a.samlInstance.error == nil, + AutoRedirect: a.samlInstance.error == nil, + DefaultWorkspaceId: a.systemInfoService.GetDefaultWorkspaceId(), + }) +} + +func createSamlInstance(systemInfoService service.SystemInfoService) SamlInstance { + var samlInstance SamlInstance + var err error + crt, err := os.CreateTemp("", "apihub.cert") + if err != nil { + log.Errorf("Apihub.cert temp file wasn't created. Error - %s", err.Error()) + samlInstance.error = err + return samlInstance + } + decodeSamlCert, err := base64.StdEncoding.DecodeString(systemInfoService.GetSamlCrt()) + if err != nil { + samlInstance.error = err + return samlInstance + } + + _, err = crt.WriteString(string(decodeSamlCert)) + + if err != nil { + log.Errorf("SAML_CRT error - %s", err) + samlInstance.error = err + return samlInstance + } + + key, err := os.CreateTemp("", "apihub.key") + if err != nil { + log.Errorf("Apihub.key temp file wasn't created. 
Error - %s", err.Error()) + samlInstance.error = err + return samlInstance + } + decodePrivateKey, err := base64.StdEncoding.DecodeString(systemInfoService.GetSamlKey()) + if err != nil { + samlInstance.error = err + return samlInstance + } + + _, err = key.WriteString(string(decodePrivateKey)) + + if err != nil { + log.Errorf("SAML_KEY error - %s", err) + samlInstance.error = err + return samlInstance + } + + defer key.Close() + defer crt.Close() + defer os.Remove(key.Name()) + defer os.Remove(crt.Name()) + + keyPair, err := tls.LoadX509KeyPair(crt.Name(), key.Name()) + if err != nil { + log.Errorf("keyPair error - %s", err) + samlInstance.error = err + return samlInstance + } + + keyPair.Leaf, err = x509.ParseCertificate(keyPair.Certificate[0]) + if err != nil { + log.Errorf("keyPair.Leaf error - %s", err) + samlInstance.error = err + return samlInstance + } + metadataUrl := systemInfoService.GetADFSMetadataUrl() + if metadataUrl == "" { + log.Error("metadataUrl env is empty") + samlInstance.error = err + return samlInstance + } + idpMetadataURL, err := url.Parse(metadataUrl) + if err != nil { + log.Errorf("idpMetadataURL error - %s", err) + samlInstance.error = err + return samlInstance + } + + tr := http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + cl := http.Client{Transport: &tr, Timeout: time.Second * 60} + idpMetadata, err := samlsp.FetchMetadata(context.Background(), &cl, *idpMetadataURL) + + if err != nil { + log.Errorf("idpMetadata error - %s", err) + samlInstance.error = err + return samlInstance + } + rootURLPath := systemInfoService.GetAPIHubUrl() + if rootURLPath == "" { + log.Error("rootURLPath env is empty") + samlInstance.error = err + return samlInstance + } + rootURL, err := url.Parse(rootURLPath) + if err != nil { + log.Errorf("rootURL error - %s", err) + samlInstance.error = err + return samlInstance + } + + samlSP, err := samlsp.New(samlsp.Options{ + URL: *rootURL, + Key: keyPair.PrivateKey.(*rsa.PrivateKey), + 
Certificate: keyPair.Leaf, + IDPMetadata: idpMetadata, + EntityID: rootURL.Path, + }) + if err != nil { + log.Errorf("New saml instanse wasn't created. Error -%s", err.Error()) + samlInstance.error = err + return samlInstance + } + + samlSP.ServiceProvider.SignatureMethod = dsig.RSASHA256SignatureMethod + samlSP.ServiceProvider.AuthnNameIDFormat = saml.TransientNameIDFormat + samlSP.ServiceProvider.AllowIDPInitiated = true + log.Infof("SAML instance initialized") + samlInstance.saml = samlSP + return samlInstance +} diff --git a/qubership-apihub-service/security/TokenController.go b/qubership-apihub-service/security/TokenController.go new file mode 100644 index 0000000..e554d17 --- /dev/null +++ b/qubership-apihub-service/security/TokenController.go @@ -0,0 +1,47 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package security + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/controller" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +type JwtPubKeyController interface { + GetRsaPublicKey(w http.ResponseWriter, r *http.Request) +} + +func NewJwtPubKeyController() JwtPubKeyController { + return &jwtPubKeyControllerImpl{} +} + +type jwtPubKeyControllerImpl struct { +} + +func (t jwtPubKeyControllerImpl) GetRsaPublicKey(w http.ResponseWriter, r *http.Request) { + key := GetPublicKey() + if key == nil { + controller.RespondWithCustomError(w, &exception.CustomError{ + Status: http.StatusNotFound, + Message: "public key not found", + }) + return + } + + w.Header().Set("Content-Type", "application/octet-stream") + w.Write(key) +} diff --git a/qubership-apihub-service/service/ActivityTrackingService.go b/qubership-apihub-service/service/ActivityTrackingService.go new file mode 100644 index 0000000..79ef4df --- /dev/null +++ b/qubership-apihub-service/service/ActivityTrackingService.go @@ -0,0 +1,334 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "net/http" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type ActivityTrackingService interface { + TrackEvent(event view.ActivityTrackingEvent) // return no error due to async processing + + GetActivityHistory_deprecated(ctx context.SecurityContext, req view.ActivityHistoryReq) (*view.PkgActivityResponse_deprecated, error) + GetActivityHistory(ctx context.SecurityContext, req view.ActivityHistoryReq) (*view.PkgActivityResponse, error) + GetEventsForPackage_deprecated(packageId string, includeRefs bool, limit int, page int, textFilter string, types []string) (*view.PkgActivityResponse_deprecated, error) + GetEventsForPackage(packageId string, includeRefs bool, limit int, page int, textFilter string, types []string) (*view.PkgActivityResponse, error) +} + +func NewActivityTrackingService(repo repository.ActivityTrackingRepository, publishedRepo repository.PublishedRepository, userService UserService) ActivityTrackingService { + return &activityTrackingServiceImpl{repo: repo, publishedRepo: publishedRepo, userService: userService} +} + +type activityTrackingServiceImpl struct { + repo repository.ActivityTrackingRepository + publishedRepo repository.PublishedRepository + userService UserService +} + +func (a activityTrackingServiceImpl) TrackEvent(event view.ActivityTrackingEvent) { + utils.SafeAsync(func() { + a.trackEventInternal(event) + }) +} + +func (a activityTrackingServiceImpl) GetActivityHistory_deprecated(ctx 
context.SecurityContext, req view.ActivityHistoryReq) (*view.PkgActivityResponse_deprecated, error) { + var ids []string + + if req.OnlyFavorite || req.OnlyShared || len(req.Kind) > 0 { + packagesFilter := view.PackageListReq{ + OnlyFavorite: req.OnlyFavorite, + OnlyShared: req.OnlyShared, + Kind: req.Kind, + } + packages, err := a.publishedRepo.GetFilteredPackagesWithOffset(packagesFilter, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get packages by filer : %v.Error - %w", packagesFilter, err) + } + if packages == nil || len(packages) == 0 { + return &view.PkgActivityResponse_deprecated{Events: make([]view.PkgActivityResponseItem_depracated, 0)}, err + } + + for _, pkg := range packages { + ids = append(ids, pkg.Id) + } + } + + // TODO: security check! need to check view rights + + atTypes := view.ConvertEventTypes(req.Types) + + ents, err := a.repo.GetEventsForPackages_deprecated(ids, req.Limit, req.Page, req.TextFilter, atTypes) + if err != nil { + return nil, fmt.Errorf("failed to get events for packages: %w", err) + } + if ents == nil || len(ents) == 0 { + return &view.PkgActivityResponse_deprecated{Events: make([]view.PkgActivityResponseItem_depracated, 0)}, err + } + + return a.makePkgActivityResponse_deprecated(ents) +} + +func (a activityTrackingServiceImpl) GetActivityHistory(ctx context.SecurityContext, req view.ActivityHistoryReq) (*view.PkgActivityResponse, error) { + var ids []string + + if req.OnlyFavorite || req.OnlyShared || len(req.Kind) > 0 { + packagesFilter := view.PackageListReq{ + OnlyFavorite: req.OnlyFavorite, + OnlyShared: req.OnlyShared, + Kind: req.Kind, + } + packages, err := a.publishedRepo.GetFilteredPackagesWithOffset(packagesFilter, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get packages by filer : %v.Error - %w", packagesFilter, err) + } + if packages == nil || len(packages) == 0 { + return &view.PkgActivityResponse{Events: make([]view.PkgActivityResponseItem, 0)}, err + } + + 
for _, pkg := range packages { + ids = append(ids, pkg.Id) + } + } + + // TODO: security check! need to check view rights + + atTypes := view.ConvertEventTypes(req.Types) + + ents, err := a.repo.GetEventsForPackages(ids, req.Limit, req.Page, req.TextFilter, atTypes) + if err != nil { + return nil, fmt.Errorf("failed to get events for packages: %w", err) + } + if ents == nil || len(ents) == 0 { + return &view.PkgActivityResponse{Events: make([]view.PkgActivityResponseItem, 0)}, err + } + + return a.makePkgActivityResponse(ents) +} + +func (a activityTrackingServiceImpl) GetEventsForPackage_deprecated(packageId string, includeRefs bool, limit int, page int, textFilter string, typeGroups []string) (*view.PkgActivityResponse_deprecated, error) { + pkgEnt, err := a.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, fmt.Errorf("failed to get package %s for events: %w", packageId, err) + } + if pkgEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + + var ids []string + if includeRefs { + childIds, err := a.publishedRepo.GetAllChildPackageIdsIncludingParent(packageId) + if err != nil { + return nil, err + } + ids = append(ids, childIds...) 
+ } else { + ids = append(ids, packageId) + } + + atTypes := view.ConvertEventTypes(typeGroups) + + ents, err := a.repo.GetEventsForPackages_deprecated(ids, limit, page, textFilter, atTypes) + if err != nil { + return nil, err + } + + return a.makePkgActivityResponse_deprecated(ents) +} + +func (a activityTrackingServiceImpl) GetEventsForPackage(packageId string, includeRefs bool, limit int, page int, textFilter string, typeGroups []string) (*view.PkgActivityResponse, error) { + pkgEnt, err := a.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, fmt.Errorf("failed to get package %s for events: %w", packageId, err) + } + if pkgEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + + var ids []string + if includeRefs { + childIds, err := a.publishedRepo.GetAllChildPackageIdsIncludingParent(packageId) + if err != nil { + return nil, err + } + ids = append(ids, childIds...) 
+ } else { + ids = append(ids, packageId) + } + + atTypes := view.ConvertEventTypes(typeGroups) + + ents, err := a.repo.GetEventsForPackages(ids, limit, page, textFilter, atTypes) + if err != nil { + return nil, err + } + + return a.makePkgActivityResponse(ents) +} + +func (a activityTrackingServiceImpl) makePkgActivityResponse_deprecated(ents []entity.EnrichedActivityTrackingEntity_deprecated) (*view.PkgActivityResponse_deprecated, error) { + result := view.PkgActivityResponse_deprecated{} + result.Events = make([]view.PkgActivityResponseItem_depracated, len(ents)) + for i, ent := range ents { + if ent.Type == string(view.ATETPublishNewRevision) || + ent.Type == string(view.ATETPublishNewVersion) || + ent.Type == string(view.ATETPatchVersionMeta) || + ent.Type == string(view.ATETDeleteVersion) { + if ent.Data != nil && getVersion(ent.Data) != "" { + if ent.NotLatestRevision { + ent.Data["notLatestRevision"] = true + } + ent.Data["version"] = view.MakeVersionRefKey(getVersion(ent.Data), getRevision(ent.Data)) + delete(ent.Data, "revision") + } + } + if ent.Type == string(view.ATETPatchVersionMeta) { + versionMeta := ent.Data["versionMeta"].([]interface{}) + for j, field := range versionMeta { + fieldStr := field.(string) + if fieldStr == "versionLabels" { + newLabelsI, newLabelsPresent := ent.Data["newVersionLabels"] + oldLabelsI, oldLabelsPresent := ent.Data["oldVersionLabels"] + if newLabelsPresent && oldLabelsPresent { + newLabels := newLabelsI.([]interface{}) + oldLabels := oldLabelsI.([]interface{}) + newLabelsStr := make([]string, len(newLabels)) + oldLabelsStr := make([]string, len(oldLabels)) + for k, v := range newLabels { + newLabelsStr[k] = v.(string) + } + for k, v := range oldLabels { + oldLabelsStr[k] = v.(string) + } + field = fmt.Sprintf("%s from [%s] to [%s]", field, strings.Join(oldLabelsStr, ", "), strings.Join(newLabelsStr, ", ")) + versionMeta[j] = field + } + } + if fieldStr == "status" { + oldStatus, oldStatusPresent := ent.Data["oldStatus"] 
+ newStatus, newStatusPresent := ent.Data["newStatus"] + if oldStatusPresent && newStatusPresent { + field = fmt.Sprintf("%s from '%s' to '%s'", field, oldStatus, newStatus) + versionMeta[j] = field + } + } + } + ent.Data["versionMeta"] = versionMeta + } + result.Events[i] = entity.MakeActivityTrackingEventView_depracated(ent) + } + return &result, nil +} + +func (a activityTrackingServiceImpl) makePkgActivityResponse(ents []entity.EnrichedActivityTrackingEntity) (*view.PkgActivityResponse, error) { + result := view.PkgActivityResponse{} + result.Events = make([]view.PkgActivityResponseItem, len(ents)) + for i, ent := range ents { + if ent.Type == string(view.ATETPublishNewRevision) || + ent.Type == string(view.ATETPublishNewVersion) || + ent.Type == string(view.ATETPatchVersionMeta) || + ent.Type == string(view.ATETDeleteVersion) || + ent.Type == string(view.ATETCreateManualGroup) || + ent.Type == string(view.ATETDeleteManualGroup) || + ent.Type == string(view.ATETOperationsGroupParameters) { + if ent.Data != nil && getVersion(ent.Data) != "" { + if ent.NotLatestRevision { + ent.Data["notLatestRevision"] = true + } + ent.Data["version"] = view.MakeVersionRefKey(getVersion(ent.Data), getRevision(ent.Data)) + delete(ent.Data, "revision") + } + } + if ent.Type == string(view.ATETPatchVersionMeta) { + versionMeta := ent.Data["versionMeta"].([]interface{}) + for j, field := range versionMeta { + fieldStr := field.(string) + if fieldStr == "versionLabels" { + newLabelsI, newLabelsPresent := ent.Data["newVersionLabels"] + oldLabelsI, oldLabelsPresent := ent.Data["oldVersionLabels"] + if newLabelsPresent && oldLabelsPresent { + newLabels := newLabelsI.([]interface{}) + oldLabels := oldLabelsI.([]interface{}) + newLabelsStr := make([]string, len(newLabels)) + oldLabelsStr := make([]string, len(oldLabels)) + for k, v := range newLabels { + newLabelsStr[k] = v.(string) + } + for k, v := range oldLabels { + oldLabelsStr[k] = v.(string) + } + field = fmt.Sprintf("%s from [%s] 
to [%s]", field, strings.Join(oldLabelsStr, ", "), strings.Join(newLabelsStr, ", ")) + versionMeta[j] = field + } + } + if fieldStr == "status" { + oldStatus, oldStatusPresent := ent.Data["oldStatus"] + newStatus, newStatusPresent := ent.Data["newStatus"] + if oldStatusPresent && newStatusPresent { + field = fmt.Sprintf("%s from '%s' to '%s'", field, oldStatus, newStatus) + versionMeta[j] = field + } + } + } + ent.Data["versionMeta"] = versionMeta + } + result.Events[i] = entity.MakeActivityTrackingEventView(ent) + } + return &result, nil +} + +func (a activityTrackingServiceImpl) trackEventInternal(event view.ActivityTrackingEvent) { + ent := entity.MakeActivityTrackingEventEntity(event) + err := a.repo.CreateEvent(&ent) + if err != nil { + log.Errorf("Failed to save tracked event %+v to DB with err: %s", ent, err) + } +} + +func getVersion(m map[string]interface{}) string { + if versionName, ok := m["version"].(string); ok { + return versionName + } + return "" +} + +func getRevision(m map[string]interface{}) int { + if revision, ok := m["revision"].(float64); ok { + return int(revision) + } + if revision, ok := m["revision"].(int); ok { + return revision + } + return 0 +} diff --git a/qubership-apihub-service/service/AgentRegistrationService.go b/qubership-apihub-service/service/AgentRegistrationService.go new file mode 100644 index 0000000..57dfba9 --- /dev/null +++ b/qubership-apihub-service/service/AgentRegistrationService.go @@ -0,0 +1,122 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type AgentRegistrationService interface { + ProcessAgentSignal(view.AgentKeepaliveMessage) (*view.AgentVersion, error) + ListAgents(onlyActive bool, showIncompatible bool) ([]view.AgentInstance, error) + GetAgent(id string) (*view.AgentInstance, error) +} + +func NewAgentRegistrationService(repository repository.AgentRepository) AgentRegistrationService { + return &agentRegistrationServiceImpl{ + repository: repository, + } +} + +type agentRegistrationServiceImpl struct { + repository repository.AgentRepository +} + +const EXPECTED_AGENT_VERSION = "1.0.0" + +func (a agentRegistrationServiceImpl) ProcessAgentSignal(message view.AgentKeepaliveMessage) (*view.AgentVersion, error) { + ent := entity.AgentEntity{ + AgentId: view.MakeAgentId(message.Cloud, message.Namespace), + Cloud: message.Cloud, + Namespace: message.Namespace, + Url: message.Url, + BackendVersion: message.BackendVersion, + Name: message.Name, + LastActive: time.Now(), + AgentVersion: message.AgentVersion, + } + + err := a.repository.CreateOrUpdateAgent(ent) + if err != nil { + return nil, err + } + return &view.AgentVersion{Version: EXPECTED_AGENT_VERSION}, nil +} + +func (a agentRegistrationServiceImpl) ListAgents(onlyActive bool, showIncompatible bool) ([]view.AgentInstance, error) { + ents, err := a.repository.ListAgents(onlyActive) + if err != nil { + return nil, err + } + + result := make([]view.AgentInstance, 0) + for _, ent := range ents { + compErr := CheckAgentCompatibility(ent.AgentVersion) + if !showIncompatible && compErr != nil { + continue + } + agentView := 
entity.MakeAgentView(ent) + agentView.CompatibilityError = compErr + result = append(result, agentView) + } + + return result, nil +} + +func (a agentRegistrationServiceImpl) GetAgent(id string) (*view.AgentInstance, error) { + ent, err := a.repository.GetAgent(id) + if err != nil { + return nil, err + } + if ent == nil { + return nil, nil + } + res := entity.MakeAgentView(*ent) + res.CompatibilityError = CheckAgentCompatibility(ent.AgentVersion) + return &res, nil +} + +func CheckAgentCompatibility(actualAgentVersion string) *view.AgentCompatibilityError { + if EXPECTED_AGENT_VERSION == actualAgentVersion { + return nil + } + if actualAgentVersion == "" { + return &view.AgentCompatibilityError{ + Severity: view.SeverityError, + Message: fmt.Sprintf("This Agent instance does not support versioning. Please, contact your System Administrator to update this Agent instance to version %s.", EXPECTED_AGENT_VERSION), + } + } + backendVersion := strings.Split(EXPECTED_AGENT_VERSION, ".") + actualVersion := strings.Split(actualAgentVersion, ".") + if backendVersion[0] != actualVersion[0] { + return &view.AgentCompatibilityError{ + Severity: view.SeverityError, + Message: fmt.Sprintf("Current version %s of Agent is incompatible with APIHUB. Please, contact your System Administrator to update this Agent instance to version %s.", actualAgentVersion, EXPECTED_AGENT_VERSION), + } + } + if backendVersion[1] != actualVersion[1] || backendVersion[2] != actualVersion[2] { + return &view.AgentCompatibilityError{ + Severity: view.SeverityWarning, + Message: fmt.Sprintf("Difference in minor/patch version of Agent detected. 
We recommend to contact your System Administrator to update this Agent instance to version %s.", EXPECTED_AGENT_VERSION), + } + } + return nil +} diff --git a/qubership-apihub-service/service/ApihubApiKeyService.go b/qubership-apihub-service/service/ApihubApiKeyService.go new file mode 100644 index 0000000..56ae619 --- /dev/null +++ b/qubership-apihub-service/service/ApihubApiKeyService.go @@ -0,0 +1,840 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "net/http" + "os" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/crypto" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" +) + +type ApihubApiKeyService interface { + CreateApiKey_deprecated(ctx context.SecurityContext, packageId, name string, requestRoles []string) (*view.ApihubApiKey_deprecated, error) + CreateApiKey_v3_deprecated(ctx context.SecurityContext, packageId, name string, requestRoles []string) (*view.ApihubApiKey_v3_deprecated, error) + CreateApiKey(ctx context.SecurityContext, packageId, name string, createdFor string, requestRoles []string) (*view.ApihubApiKey, error) + RevokePackageApiKey(ctx context.SecurityContext, apiKeyId string, packageId string) error + GetProjectApiKeys_deprecated(packageId string) (*view.ApihubApiKeys_deprecated, error) + GetProjectApiKeys_v3_deprecated(packageId string) (*view.ApihubApiKeys_v3_deprecated, error) + GetProjectApiKeys(packageId string) (*view.ApihubApiKeys, error) + GetApiKeyStatus(apiKey string, packageId string) (bool, *view.ApihubApiKey, error) + GetApiKeyByKey(apiKey string) (*view.ApihubApiKeyExtAuthView, error) + GetApiKeyById(apiKeyId string) (*view.ApihubApiKeyExtAuthView, error) + CreateSystemApiKey(apiKey string) error +} + +func NewApihubApiKeyService(apihubApiKeyRepository repository.ApihubApiKeyRepository, + publishedRepo repository.PublishedRepository, + atService ActivityTrackingService, + 
userService UserService, + roleRepository repository.RoleRepository, + isSysadm func(context.SecurityContext) bool) ApihubApiKeyService { + + return &apihubApiKeyServiceImpl{ + apiKeyRepository: apihubApiKeyRepository, + publishedRepo: publishedRepo, + atService: atService, + userService: userService, + roleRepository: roleRepository, + isSysadm: isSysadm, + } +} + +type apihubApiKeyServiceImpl struct { + apiKeyRepository repository.ApihubApiKeyRepository + publishedRepo repository.PublishedRepository + atService ActivityTrackingService + userService UserService + roleRepository repository.RoleRepository + isSysadm func(context.SecurityContext) bool +} + +const API_KEY_PREFIX = "api-key_" + +func (t apihubApiKeyServiceImpl) CreateApiKey_deprecated(ctx context.SecurityContext, packageId, name string, requestRoles []string) (*view.ApihubApiKey_deprecated, error) { + // validate request roles first + if len(requestRoles) > 0 { + allRoles, err := t.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + existingIds := map[string]struct{}{} + for _, role := range allRoles { + existingIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := existingIds[roleId]; !exists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotFound, + Message: exception.RoleNotFoundMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + } + + var resultRoles []string + + if packageId != "*" { + packageEnt, err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !t.isSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, 
+ Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + roleEnts, err := t.roleRepository.GetAvailablePackageRoles(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + if len(requestRoles) > 0 { + // check requested roles against available for current user + availableIds := map[string]struct{}{} + for _, role := range roleEnts { + availableIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := availableIds[roleId]; !exists { + // user do not have permission for the role + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotAvailableRole, + Message: exception.NotAvailableRoleMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + // all request roles passed the check, so now we can add it to result + resultRoles = append(resultRoles, requestRoles...) + } else { + userRoles, err := t.roleRepository.GetPackageRolesHierarchyForUser(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + for _, roleEnt := range userRoles { + resultRoles = append(resultRoles, roleEnt.RoleId) + } + if !utils.SliceContains(resultRoles, packageEnt.DefaultRole) { + resultRoles = append(resultRoles, packageEnt.DefaultRole) + } + } + } else { + if len(requestRoles) > 0 { + resultRoles = append(resultRoles, requestRoles...) // set all request roles to result. 
Requester is sysadmin(requirements for *), so it's ok + } else { + resultRoles = append(resultRoles, view.SysadmRole) // request roles not set - fallback to sysadmin role to keep old behavior + } + } + apiKey := crypto.CreateRandomHash() + keyToCreate := view.ApihubApiKey_deprecated{ + Id: t.makeApiKeyId(), + PackageId: packageId, + Name: name, + CreatedBy: ctx.GetUserId(), + CreatedAt: time.Now(), + ApiKey: apiKey, + Roles: resultRoles, + } + apiKeyHash := crypto.CreateSHA256Hash([]byte(apiKey)) + apihubApiKeyEntity := entity.MakeApihubApiKeyEntity_deprecated(keyToCreate, apiKeyHash) + err := t.apiKeyRepository.SaveApiKey_deprecated(apihubApiKeyEntity) + if err != nil { + return nil, err + } + + if packageId != "*" { + dataMap := map[string]interface{}{} + dataMap["apiKeyId"] = apihubApiKeyEntity.Id + dataMap["apiKeyName"] = apihubApiKeyEntity.Name + dataMap["apiKeyRoleIds"] = apihubApiKeyEntity.Roles + t.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETGenerateApiKey, + Data: dataMap, + PackageId: packageId, // Will not work for * case due to constraint in DB + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + } + + return &keyToCreate, nil +} + +func (t apihubApiKeyServiceImpl) CreateApiKey_v3_deprecated(ctx context.SecurityContext, packageId, name string, requestRoles []string) (*view.ApihubApiKey_v3_deprecated, error) { + // validate request roles first + if len(requestRoles) > 0 { + allRoles, err := t.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + existingIds := map[string]struct{}{} + for _, role := range allRoles { + existingIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := existingIds[roleId]; !exists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotFound, + Message: exception.RoleNotFoundMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + } + + var resultRoles []string + + if packageId != "*" { + packageEnt, 
err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !t.isSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + if len(requestRoles) > 0 { + var availableRoles []entity.RoleEntity + if t.isSysadm(ctx) { + availableRoles, err = t.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + } else { + availableRoles, err = t.roleRepository.GetAvailablePackageRoles(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + } + // check requested roles against available for current user + availableIds := map[string]struct{}{} + for _, role := range availableRoles { + availableIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := availableIds[roleId]; !exists { + // user do not have permission for the role + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotAvailableRole, + Message: exception.NotAvailableRoleMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + // all request roles passed the check, so now we can add it to result + resultRoles = append(resultRoles, requestRoles...) 
+ } else { + if t.isSysadm(ctx) { + resultRoles = append(resultRoles, view.SysadmRole) + } else { + userRoles, err := t.roleRepository.GetPackageRolesHierarchyForUser(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + for _, roleEnt := range userRoles { + resultRoles = append(resultRoles, roleEnt.RoleId) + } + if len(resultRoles) == 0 { + resultRoles = append(resultRoles, packageEnt.DefaultRole) + } + } + } + } else { + if len(requestRoles) > 0 { + resultRoles = append(resultRoles, requestRoles...) // set all request roles to result. Requester is sysadmin(requirements for *), so it's ok + } else { + resultRoles = append(resultRoles, view.SysadmRole) // request roles not set - fallback to sysadmin role to keep old behavior + } + } + apiKey := crypto.CreateRandomHash() + keyToCreate := view.ApihubApiKey{ + Id: t.makeApiKeyId(), + PackageId: packageId, + Name: name, + CreatedBy: view.User{Id: ctx.GetUserId()}, + CreatedFor: &view.User{Id: ""}, + CreatedAt: time.Now(), + ApiKey: apiKey, + Roles: resultRoles, + } + apiKeyHash := crypto.CreateSHA256Hash([]byte(apiKey)) + apihubApiKeyEntity := entity.MakeApihubApiKeyEntity(keyToCreate, apiKeyHash) + err := t.apiKeyRepository.SaveApiKey(apihubApiKeyEntity) + if err != nil { + return nil, err + } + + if packageId != "*" { + dataMap := map[string]interface{}{} + dataMap["apiKeyId"] = apihubApiKeyEntity.Id + dataMap["apiKeyName"] = apihubApiKeyEntity.Name + dataMap["apiKeyRoleIds"] = apihubApiKeyEntity.Roles + t.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETGenerateApiKey, + Data: dataMap, + PackageId: packageId, // Will not work for * case due to constraint in DB + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + } + createdEnt, err := t.apiKeyRepository.GetPackageApiKey_deprecated(keyToCreate.Id, packageId) + if err != nil { + return nil, err + } + if createdEnt == nil { + return nil, fmt.Errorf("failed to get created api key") + } + + apiKeyView := 
entity.MakeApihubApiKeyView_v3_deprecated(*createdEnt) + apiKeyView.ApiKey = apiKey + return apiKeyView, nil +} + +func (t apihubApiKeyServiceImpl) CreateApiKey(ctx context.SecurityContext, packageId, name string, createdFor string, requestRoles []string) (*view.ApihubApiKey, error) { + // validate request roles first + if len(requestRoles) > 0 { + allRoles, err := t.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + existingIds := map[string]struct{}{} + for _, role := range allRoles { + existingIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := existingIds[roleId]; !exists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotFound, + Message: exception.RoleNotFoundMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + } + + var resultRoles []string + + if packageId != "*" { + packageEnt, err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !t.isSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + if len(requestRoles) > 0 { + var availableRoles []entity.RoleEntity + allRoles, err := t.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + if t.isSysadm(ctx) { + availableRoles = allRoles + } else if ctx.GetApikeyPackageId() == packageId || strings.HasPrefix(packageId, ctx.GetApikeyPackageId()+".") || ctx.GetApikeyPackageId() == "*" { + maxRoleRank := -1 + for _, apikeyRoleId := range ctx.GetApikeyRoles() { 
+ for _, role := range allRoles { + if apikeyRoleId == role.Id { + if maxRoleRank < role.Rank { + maxRoleRank = role.Rank + } + } + } + } + for _, role := range allRoles { + if maxRoleRank >= role.Rank { + availableRoles = append(availableRoles, role) + } + } + } else { + availableRoles, err = t.roleRepository.GetAvailablePackageRoles(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + } + // check requested roles against available for current user + availableRoleIds := map[string]struct{}{} + for _, role := range availableRoles { + availableRoleIds[role.Id] = struct{}{} + } + for _, roleId := range requestRoles { + if _, exists := availableRoleIds[roleId]; !exists { + // user do not have permission for the role + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotAvailableRole, + Message: exception.NotAvailableRoleMsg, + Params: map[string]interface{}{"role": roleId}, + } + } + } + // all request roles passed the check, so now we can add it to result + resultRoles = append(resultRoles, requestRoles...) + } else { + if t.isSysadm(ctx) { + resultRoles = append(resultRoles, view.SysadmRole) + } else { + userRoles, err := t.roleRepository.GetPackageRolesHierarchyForUser(packageId, ctx.GetUserId()) + if err != nil { + return nil, err + } + for _, roleEnt := range userRoles { + resultRoles = append(resultRoles, roleEnt.RoleId) + } + if len(resultRoles) == 0 { + resultRoles = append(resultRoles, packageEnt.DefaultRole) + } + } + } + } else { + if len(requestRoles) > 0 { + resultRoles = append(resultRoles, requestRoles...) // set all request roles to result. 
Requester is sysadmin(requirements for *), so it's ok + } else { + resultRoles = append(resultRoles, view.SysadmRole) // request roles not set - fallback to sysadmin role to keep old behavior + } + } + + existingApiKeyEntities, err := t.apiKeyRepository.GetPackageApiKeys(packageId) + if err != nil { + return nil, err + } + for _, existingApiKeyEntity := range existingApiKeyEntities { + if existingApiKeyEntity.DeletedAt == nil && existingApiKeyEntity.ApihubApiKeyEntity.Name == name { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ApiKeyNameDuplicate, + Message: exception.ApiKeyNameDuplicateMsg, + Params: map[string]interface{}{"name": name}, + } + } + } + + var createdForUser *view.User + if createdFor != "" { + createdForUser, err = t.userService.GetUserFromDB(createdFor) + if err != nil { + return nil, err + } + if createdForUser == nil { + usersFromLdap, err := t.userService.SearchUsersInLdap(view.LdapSearchFilterReq{FilterToValue: map[string]string{view.SAMAccountName: createdFor}, Limit: 1}, true) + if err != nil { + return nil, err + } + if usersFromLdap == nil || len(usersFromLdap.Users) == 0 { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": createdFor}, + } + } + user := usersFromLdap.Users[0] + err = t.userService.StoreUserAvatar(user.Id, user.Avatar) + if err != nil { + return nil, err + } + externalUser := view.User{ + Id: user.Id, + Name: user.Name, + Email: user.Email, + AvatarUrl: fmt.Sprintf("/api/v2/users/%s/profile/avatar", user.Id), + } + createdForUser, err = t.userService.GetOrCreateUserForIntegration(externalUser, view.ExternalLdapIntegration) + if err != nil { + return nil, err + } + } + } + + apiKey := crypto.CreateRandomHash() + keyToCreate := view.ApihubApiKey{ + Id: t.makeApiKeyId(), + PackageId: packageId, + Name: name, + CreatedBy: view.User{Id: ctx.GetUserId()}, + 
CreatedFor: createdForUser, + CreatedAt: time.Now(), + ApiKey: apiKey, + Roles: resultRoles, + } + apiKeyHash := crypto.CreateSHA256Hash([]byte(apiKey)) + apihubApiKeyEntity := entity.MakeApihubApiKeyEntity(keyToCreate, apiKeyHash) + err = t.apiKeyRepository.SaveApiKey(apihubApiKeyEntity) + if err != nil { + return nil, err + } + + if packageId != "*" { + dataMap := map[string]interface{}{} + dataMap["apiKeyId"] = apihubApiKeyEntity.Id + dataMap["apiKeyName"] = apihubApiKeyEntity.Name + dataMap["apiKeyRoleIds"] = apihubApiKeyEntity.Roles + t.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETGenerateApiKey, + Data: dataMap, + PackageId: packageId, // Will not work for * case due to constraint in DB + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + } + createdEnt, err := t.apiKeyRepository.GetPackageApiKey(keyToCreate.Id, packageId) + if err != nil { + return nil, err + } + if createdEnt == nil { + return nil, fmt.Errorf("failed to get created api key") + } + + apiKeyView := entity.MakeApihubApiKeyView(*createdEnt) + apiKeyView.ApiKey = apiKey + return apiKeyView, nil +} + +func (t apihubApiKeyServiceImpl) RevokePackageApiKey(ctx context.SecurityContext, apiKeyId string, packageId string) error { + apiKeyEntity, err := t.apiKeyRepository.GetPackageApiKey(apiKeyId, packageId) + if err != nil { + return err + } + if apiKeyEntity == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageApiKeyNotFound, + Message: exception.PackageApiKeyNotFoundMsg, + Params: map[string]interface{}{"apiKeyId": apiKeyId, "packageId": packageId}, + } + } + if apiKeyEntity.DeletedAt != nil || apiKeyEntity.DeletedBy != "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageApiKeyAlreadyRevoked, + Message: exception.PackageApiKeyAlreadyRevokedMsg, + Params: map[string]interface{}{"apiKeyId": apiKeyId, "packageId": packageId}, + } + } + if packageId != "*" { + packageEnt, err := 
t.publishedRepo.GetPackage(packageId) + if err != nil { + return err + } + if packageEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !t.isSysadm(ctx) { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + } + + err = t.apiKeyRepository.RevokeApiKey(apiKeyId, ctx.GetUserId()) + if err != nil { + return err + } + dataMap := map[string]interface{}{} + dataMap["apiKeyId"] = apiKeyEntity.Id + dataMap["apiKeyName"] = apiKeyEntity.Name + dataMap["apiKeyRoleIds"] = apiKeyEntity.Roles + t.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETRevokeApiKey, + Data: dataMap, + PackageId: apiKeyEntity.PackageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + return nil +} + +func (t apihubApiKeyServiceImpl) GetProjectApiKeys_deprecated(packageId string) (*view.ApihubApiKeys_deprecated, error) { + if packageId != "*" { + packageEnt, err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + } + apiKeys := make([]view.ApihubApiKey_deprecated, 0) + apiKeyEntities, err := t.apiKeyRepository.GetPackageApiKeys_deprecated(packageId) + if err != nil { + return nil, err + } + for _, apiKeyEntity := range apiKeyEntities { + if apiKeyEntity.DeletedAt == nil { + apiKeys = append(apiKeys, *entity.MakeApihubApiKeyView_deprecated(apiKeyEntity)) + } + } + return &view.ApihubApiKeys_deprecated{ApiKeys: 
apiKeys}, nil +} + +func (t apihubApiKeyServiceImpl) GetProjectApiKeys_v3_deprecated(packageId string) (*view.ApihubApiKeys_v3_deprecated, error) { + if packageId != "*" { + packageEnt, err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + } + apiKeys := make([]view.ApihubApiKey_v3_deprecated, 0) + apiKeyEntities, err := t.apiKeyRepository.GetPackageApiKeys_v3_deprecated(packageId) + if err != nil { + return nil, err + } + for _, apiKeyEntity := range apiKeyEntities { + if apiKeyEntity.DeletedAt == nil { + apiKeys = append(apiKeys, *entity.MakeApihubApiKeyView_v3_deprecated(apiKeyEntity)) + } + } + return &view.ApihubApiKeys_v3_deprecated{ApiKeys: apiKeys}, nil +} +func (t apihubApiKeyServiceImpl) GetProjectApiKeys(packageId string) (*view.ApihubApiKeys, error) { + if packageId != "*" { + packageEnt, err := t.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + } + apiKeys := make([]view.ApihubApiKey, 0) + apiKeyEntities, err := t.apiKeyRepository.GetPackageApiKeys(packageId) + if err != nil { + return nil, err + } + for _, apiKeyEntity := range apiKeyEntities { + if apiKeyEntity.DeletedAt == nil { + apiKeys = append(apiKeys, *entity.MakeApihubApiKeyView(apiKeyEntity)) + } + } + return &view.ApihubApiKeys{ApiKeys: apiKeys}, nil +} + +func (t apihubApiKeyServiceImpl) GetApiKeyStatus(apiKey string, packageId string) (bool, *view.ApihubApiKey, error) { + apiKeyHash := crypto.CreateSHA256Hash([]byte(apiKey)) + apiKeyEnt, err := 
t.apiKeyRepository.GetApiKeyByHash(apiKeyHash)
	if err != nil {
		return false, nil, err
	}
	if apiKeyEnt == nil {
		// apiKey doesn't exist
		return false, nil, nil
	}
	apiKeyUserEnt := entity.ApihubApiKeyUserEntity{ApihubApiKeyEntity: *apiKeyEnt}
	if apiKeyEnt.DeletedAt != nil {
		// apiKey exists but it was revoked
		return true, entity.MakeApihubApiKeyView(apiKeyUserEnt), nil
	}

	if apiKeyEnt.PackageId != "*" && apiKeyEnt.PackageId != packageId && !strings.HasPrefix(packageId, apiKeyEnt.PackageId+".") {
		return false, nil, &exception.CustomError{
			Status:  http.StatusForbidden,
			Code:    exception.InsufficientPrivileges,
			Message: exception.InsufficientPrivilegesMsg,
		}
	}
	// apiKey exists and is valid for the requested package
	return false, entity.MakeApihubApiKeyView(apiKeyUserEnt), nil
}

// GetApiKeyByKey resolves an api key by its raw value (matched via its
// SHA-256 hash) and returns the external-auth projection of the key.
// A (nil, nil) result means no such key exists.
func (t apihubApiKeyServiceImpl) GetApiKeyByKey(apiKey string) (*view.ApihubApiKeyExtAuthView, error) {
	keyEnt, lookupErr := t.apiKeyRepository.GetApiKeyByHash(crypto.CreateSHA256Hash([]byte(apiKey)))
	if lookupErr != nil || keyEnt == nil {
		// Unknown key and lookup failure both surface as-is to the caller.
		return nil, lookupErr
	}
	return &view.ApihubApiKeyExtAuthView{
		Id:        keyEnt.Id,
		PackageId: keyEnt.PackageId,
		Name:      keyEnt.Name,
		Revoked:   keyEnt.DeletedAt != nil,
		Roles:     keyEnt.Roles,
	}, nil
}

// GetApiKeyById resolves an api key by its identifier and returns the
// external-auth projection of the key. A (nil, nil) result means no such
// key exists.
func (t apihubApiKeyServiceImpl) GetApiKeyById(apiKeyId string) (*view.ApihubApiKeyExtAuthView, error) {
	keyEnt, lookupErr := t.apiKeyRepository.GetApiKey(apiKeyId)
	if lookupErr != nil || keyEnt == nil {
		// Unknown key and lookup failure both surface as-is to the caller.
		return nil, lookupErr
	}
	return &view.ApihubApiKeyExtAuthView{
		Id:        keyEnt.Id,
		PackageId: keyEnt.PackageId,
		Name:      keyEnt.Name,
		Revoked:   keyEnt.DeletedAt != nil,
		Roles:     keyEnt.Roles,
	}, nil
}

// CreateSystemApiKey ensures the given system-level ("*"-scoped) api key
// exists, creating it under the sysadmin user when absent (continued below).
func (t apihubApiKeyServiceImpl) CreateSystemApiKey(apiKey string) error {
	if apiKey == "" {
		return fmt.Errorf("system api key must not be empty")
	}

	packageId, apiKeyName := "*", "system_api_key"
+ resultRoles := []string{view.SysadmRole} + + existingKey, err := t.GetApiKeyByKey(apiKey) + if err != nil { + return err + } + if existingKey != nil { + log.Info("provided system api key already exists") + return nil + } else { + log.Debug("system api key not found, creating new") + + email := os.Getenv(APIHUB_ADMIN_EMAIL) + adminUser, err := t.userService.GetUserByEmail(email) + if err != nil { + return err + } + if adminUser == nil { + return fmt.Errorf("failed to generate system api key: no sysadm user has found") + } + + keyToCreate := view.ApihubApiKey{ + Id: t.makeApiKeyId(), + PackageId: packageId, + Name: apiKeyName, + CreatedBy: view.User{Id: adminUser.Id}, + CreatedFor: nil, + CreatedAt: time.Now(), + ApiKey: apiKey, + Roles: resultRoles, + } + apiKeyHash := crypto.CreateSHA256Hash([]byte(apiKey)) + apihubApiKeyEntity := entity.MakeApihubApiKeyEntity(keyToCreate, apiKeyHash) + err = t.apiKeyRepository.SaveApiKey(apihubApiKeyEntity) + if err != nil { + return err + } + log.Info("new system api key has been created") + + existingApiKeyEntities, err := t.apiKeyRepository.GetPackageApiKeys(packageId) + if err != nil { + return err + } + for _, existingApiKeyEntity := range existingApiKeyEntities { + if existingApiKeyEntity.DeletedAt == nil && + existingApiKeyEntity.ApihubApiKeyEntity.Name == apiKeyName && + existingApiKeyEntity.Id != apihubApiKeyEntity.Id { + err = t.RevokePackageApiKey(context.CreateFromId(adminUser.Id), existingApiKeyEntity.Id, packageId) + if err != nil { + return err + } + } + } + } + return nil +} + +func (t apihubApiKeyServiceImpl) makeApiKeyId() string { + return API_KEY_PREFIX + uuid.New().String() +} diff --git a/qubership-apihub-service/service/BranchEditorsService.go b/qubership-apihub-service/service/BranchEditorsService.go new file mode 100644 index 0000000..76f5023 --- /dev/null +++ b/qubership-apihub-service/service/BranchEditorsService.go @@ -0,0 +1,251 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// 
Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "errors" + "fmt" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket" + "github.com/buraksezer/olric" + log "github.com/sirupsen/logrus" +) + +type BranchEditorsService interface { + AddBranchEditor(projectId string, branchName string, userId string) error + GetBranchEditors(projectId string, branchName string) ([]view.User, error) + RemoveBranchEditor(projectId string, branchName string, userId string) error + RemoveBranchEditors(projectId string, branchName string) error +} + +func NewBranchEditorsService(userService UserService, + wsBranchService WsBranchService, + branchRepository repository.BranchRepository, op cache.OlricProvider) BranchEditorsService { + + bs := &branchEditorsServiceImpl{ + userService: userService, + wsBranchService: wsBranchService, + branchRepository: branchRepository, + op: op, + isReadyWg: sync.WaitGroup{}, + } + + bs.isReadyWg.Add(1) + utils.SafeAsync(func() { + bs.initWhenOlricReady() + }) + + return bs +} + +const keySeparator = "|@@|" + +type branchEditorsServiceImpl struct { 
+ userService UserService + wsBranchService WsBranchService + branchRepository repository.BranchRepository + + op cache.OlricProvider + isReadyWg sync.WaitGroup + olricC *olric.Olric + + editors *olric.DMap +} + +func (b *branchEditorsServiceImpl) initWhenOlricReady() { + var err error + hasErrors := false + + b.olricC = b.op.Get() + b.editors, err = b.olricC.NewDMap("Editors") + if err != nil { + log.Errorf("Failed to creare dmap Editors: %s", err.Error()) + hasErrors = true + } + + drafts, _ := b.branchRepository.GetBranchDrafts() + for _, draft := range drafts { + if len(draft.Editors) > 0 { + err = b.editors.Put(draft.ProjectId+keySeparator+draft.BranchName, draft.Editors) + if err != nil { + log.Errorf("Failed to add editors to dmap: %s", err.Error()) + hasErrors = true + } + } + } + + if hasErrors { + log.Infof("Failed to init BranchEditorsService, going to retry") + time.Sleep(time.Second * 5) + b.initWhenOlricReady() + return + } + + b.isReadyWg.Done() + log.Infof("BranchEditorsService is ready") +} + +func (b *branchEditorsServiceImpl) AddBranchEditor(projectId string, branchName string, userId string) error { + b.isReadyWg.Wait() + + isAdded, editors, err := b.addBranchEditorToCache(projectId, branchName, userId) + if err != nil { + return fmt.Errorf("failed to add editor to cache %s", err) + } + if isAdded { + utils.SafeAsync(func() { + err = b.branchRepository.SetDraftEditors(projectId, branchName, editors) + if err != nil { + log.Errorf("failed to set editors for project %s and branch %s to db: %s", projectId, branchName, err.Error()) + } + + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchEditorAddedPatch{ + Type: websocket.BranchEditorAddedType, + UserId: userId, + }) + }) + } + return nil +} + +func (b *branchEditorsServiceImpl) GetBranchEditors(projectId string, branchName string) ([]view.User, error) { + b.isReadyWg.Wait() + + userIds, err := b.getBranchEditorsFromCache(projectId, branchName) + if err != nil { + 
return nil, fmt.Errorf("failed to get editors from cache: %s", err) + } + if userIds == nil { + return make([]view.User, 0), nil + } + users, err := b.userService.GetUsersByIds(userIds) + if err != nil { + return nil, err + } + return users, nil +} + +func (b *branchEditorsServiceImpl) RemoveBranchEditor(projectId string, branchName string, userId string) error { + b.isReadyWg.Wait() + + isRemoved, editors, err := b.removeBranchEditorFromCache(projectId, branchName, userId) + if err != nil { + return fmt.Errorf("failed to remove editor from cache: %s", err) + } + if isRemoved { + utils.SafeAsync(func() { + err = b.branchRepository.SetDraftEditors(projectId, branchName, editors) + if err != nil { + log.Errorf("failed to set editors for project %s and branch %s to db: %s", projectId, branchName, err.Error()) + } + + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchEditorRemovedPatch{ + Type: websocket.BranchEditorRemovedType, + UserId: userId, + }) + }) + } + return nil +} + +func (b *branchEditorsServiceImpl) RemoveBranchEditors(projectId string, branchName string) error { + b.isReadyWg.Wait() + + err := b.removeBranchEditorsFromCache(projectId, branchName) + if err != nil { + return fmt.Errorf("failed to remove editors from cache: %s", err) + } + return nil +} + +func (b *branchEditorsServiceImpl) addBranchEditorToCache(projectId string, branchName string, userId string) (bool, []string, error) { + key := projectId + keySeparator + branchName + + val, err := b.editors.Get(key) + if err != nil { + if errors.Is(err, olric.ErrKeyNotFound) { + // ok + } else { + return false, nil, err + } + } + var branchEditors []string + if val != nil { + branchEditors = val.([]string) + if utils.SliceContains(branchEditors, userId) { + return false, nil, nil + } + } else { + branchEditors = []string{} + } + branchEditors = append(branchEditors, userId) + err = b.editors.Put(key, branchEditors) + if err != nil { + return false, nil, err + } + return 
true, branchEditors, nil +} + +func (b *branchEditorsServiceImpl) getBranchEditorsFromCache(projectId string, branchName string) ([]string, error) { + key := projectId + keySeparator + branchName + + val, err := b.editors.Get(key) + if err != nil { + if errors.Is(err, olric.ErrKeyNotFound) { + return nil, nil + } else { + return nil, err + } + } + + return val.([]string), nil +} + +func (b *branchEditorsServiceImpl) removeBranchEditorFromCache(projectId string, branchName string, userId string) (bool, []string, error) { + key := projectId + keySeparator + branchName + + val, err := b.editors.Get(key) + if err != nil { + if errors.Is(err, olric.ErrKeyNotFound) { + return false, nil, nil + } else { + return false, nil, err + } + } + branchEditors := val.([]string) + + if index := utils.SliceIndex(branchEditors, userId); index != -1 { + branchEditors = append(branchEditors[:index], branchEditors[index+1:]...) + err = b.editors.Put(key, branchEditors) + if err != nil { + return false, nil, err + } + return true, branchEditors, nil + } + return false, nil, nil +} + +func (b *branchEditorsServiceImpl) removeBranchEditorsFromCache(projectId string, branchName string) error { + key := projectId + keySeparator + branchName + return b.editors.Delete(key) +} diff --git a/qubership-apihub-service/service/BranchService.go b/qubership-apihub-service/service/BranchService.go new file mode 100644 index 0000000..80fd81c --- /dev/null +++ b/qubership-apihub-service/service/BranchService.go @@ -0,0 +1,1743 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "archive/zip" + "bytes" + goctx "context" + "crypto/sha1" + "encoding/hex" + "encoding/json" + "fmt" + "net/http" + "os" + "sort" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/archive" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket" + ws "github.com/gorilla/websocket" + "github.com/gosimple/slug" + log "github.com/sirupsen/logrus" + "golang.org/x/sync/errgroup" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +const ApiHubBaseConfigPath = "apihub-config/" + +type BranchService interface { + GetProjectBranchesFromGit(ctx goctx.Context, projectId string, filter string, limit int) ([]view.BranchItemView, error) + CreateDraftFromGit(ctx goctx.Context, projectId string, branchName string) error + GetBranchDetails(ctx goctx.Context, projectId string, branchName string) (*view.Branch, error) + GetBranchDetailsEP(ctx goctx.Context, projectId string, branchName string, createDraft bool) (*view.Branch, error) + RecalculateDraftConfigChangeType(ctx goctx.Context, projectId 
string, branchName string) error + RecalculateDraftConfigFolders(ctx goctx.Context, projectId string, branchName string) error + UpdateDraftConfigChangeType(ctx goctx.Context, projectId string, branchName string, changeType view.ChangeType) error + GetBranchDetailsFromDraft(ctx goctx.Context, projectId string, branchName string, allowBrokenRefs bool) (*view.Branch, error) + GetBranchRawConfigFromDraft(ctx goctx.Context, projectId string, branchName string) ([]byte, error) + GetBranchDetailsFromGit(ctx goctx.Context, projectId string, branchName string) (*view.Branch, string, error) + GetBranchDetailsFromGitCommit(ctx goctx.Context, projectId string, branchName string, commitId string) (*view.Branch, error) + GetBranchRawConfigFromGit(ctx goctx.Context, projectId string, branchName string) ([]byte, error) + GetContentNoData(ctx goctx.Context, projectId string, branchName string, contentId string) (*view.Content, error) + DraftExists(projectId string, branchName string) (bool, error) + DraftContainsChanges(ctx goctx.Context, projectId string, branchName string) (bool, error) + BranchExists(ctx goctx.Context, projectId string, branchName string) (bool, bool, error) + CloneBranch(ctx goctx.Context, projectId string, branchName string, newBranchName string) error + CreateMergeRequest(ctx goctx.Context, projectId string, targetBranchName string, sourceBranchName string, title string, description string) (string, error) + DeleteBranch(ctx goctx.Context, projectId string, branchName string) error + ResetBranchDraft(ctx goctx.Context, projectId string, branchName string, sendResetNotification bool) error + DeleteBranchDraft(projectId string, branchName string) error + CalculateBranchConflicts(ctx goctx.Context, projectId string, branchName string) (*view.BranchConflicts, error) + ConnectToWebsocket(ctx goctx.Context, projectId string, branchName string, wsId string, connection *ws.Conn) error + GetAllZippedContentFromGitCommit(ctx goctx.Context, branchDetails *view.Branch, 
projectId string, branchName string, commitId string) ([]byte, error) + GetVersionPublishDetailsFromGitCommit(ctx goctx.Context, projectId string, branchName string, commitId string) (*view.GitVersionPublish, error) +} + +func NewBranchService(projectService ProjectService, + draftRepo repository.DraftRepository, + gitClientProvider GitClientProvider, + publishedRepo repository.PublishedRepository, + wsBranchService WsBranchService, + branchEditorsService BranchEditorsService, + branchRepository repository.BranchRepository) BranchService { + branchService := &branchServiceImpl{ + projectService: projectService, + gitClientProvider: gitClientProvider, + draftRepo: draftRepo, + publishedRepo: publishedRepo, + wsBranchService: wsBranchService, + branchEditorsService: branchEditorsService, + branchRepository: branchRepository, + branchMapMutex: &sync.RWMutex{}, + branchMutex: map[string]*sync.RWMutex{}, + } + //todo move this to a separate service that manages all other jobs + branchService.startCleanupJob(time.Second * 30) + + return branchService +} + +type branchServiceImpl struct { + projectService ProjectService + gitClientProvider GitClientProvider + draftRepo repository.DraftRepository + publishedRepo repository.PublishedRepository + wsBranchService WsBranchService + branchEditorsService BranchEditorsService + branchRepository repository.BranchRepository + branchMapMutex *sync.RWMutex + branchMutex map[string]*sync.RWMutex +} + +func (b *branchServiceImpl) GetProjectBranchesFromGit(ctx goctx.Context, projectId string, filter string, limit int) ([]view.BranchItemView, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetProjectBranchesFromGit(%s,%s,%d)", projectId, filter, limit)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + + gitClient, err := 
b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId())
	if err != nil {
		return nil, err
	}

	names, canPush, err := gitClient.GetRepoBranches(ctx, project.Integration.RepositoryId, filter, limit)
	if err != nil {
		return nil, err
	}
	tags, err := gitClient.GetRepoTags(ctx, project.Integration.RepositoryId, filter, limit)
	if err != nil {
		return nil, err
	}
	if len(tags) != 0 {
		// tags are never pushable, so pad canPush with zero values to keep the
		// two slices aligned by index
		names = append(names, tags...)
		canPush = append(canPush, make([]bool, len(tags))...)
	}
	if len(names) == 0 {
		return nil, nil
	}

	var result []view.BranchItemView
	for i, name := range names {
		permissions := make([]string, 0)
		if canPush[i] {
			permissions = append(permissions, "all")
		}
		result = append(result, view.BranchItemView{Name: name, Permissions: permissions})
	}
	sort.SliceStable(result, func(i, j int) bool {
		return result[i].Name < result[j].Name
	})

	if len(result) < limit {
		return result, nil
	}
	return result[:limit], nil
}

// CreateDraftFromGit creates a branch draft from the branch's current git
// state (config, file contents and refs at the branch head commit).
func (b *branchServiceImpl) CreateDraftFromGit(ctx goctx.Context, projectId string, branchName string) error {
	branchDetails, currentCommit, err := b.GetBranchDetailsFromGit(ctx, projectId, branchName)
	if err != nil {
		return err
	}
	return b.createBranchDraft(ctx, projectId, branchName, branchDetails, currentCommit)
}

// createBranchDraft persists a draft snapshot of the branch (config, file
// contents, refs) for the given commit. Draft creation is serialized per
// (project, branch) via a dedicated mutex; if a draft for the same commit
// already exists the call is a no-op.
func (b *branchServiceImpl) createBranchDraft(ctx goctx.Context, projectId string, branchName string, branchDetails *view.Branch, currentCommit string) error {
	ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("createBranchDraft(%s,%s,%+v,%s)", projectId, branchName, branchDetails, currentCommit))

	// Fetch or lazily create the per-branch mutex under the map lock, but
	// release the map lock BEFORE taking the branch lock: the original code
	// acquired the branch mutex while still holding branchMapMutex, which
	// stalled draft creation for every other branch whenever one branch's
	// draft creation was slow or blocked.
	branchMutexKey := fmt.Sprintf("%v%v%v", projectId, stringSeparator, branchName)
	b.branchMapMutex.Lock()
	mutex, branchMutexExists := b.branchMutex[branchMutexKey]
	if !branchMutexExists {
		mutex = &sync.RWMutex{}
		b.branchMutex[branchMutexKey] = mutex
	}
	b.branchMapMutex.Unlock()
	mutex.Lock()
	defer mutex.Unlock()

	branchDraftCommitId, err := b.getBranchDraftCommitId(projectId, branchName)
	if err != nil {
		return err
	}
	if branchDraftCommitId != "" && branchDraftCommitId == currentCommit {
		// draft already matches the current git commit, nothing to do
		return nil
	}

	start := time.Now()

	branchEnt := entity.BranchDraftEntity{
		ProjectId:      projectId,
		BranchName:     branchName,
		ChangeType:     string(view.CTUnchanged),
		Editors:        []string{},
		OriginalConfig: []byte{},
		CommitId:       currentCommit,
	}

	var contentEnts []*entity.ContentDraftEntity
	var refEnts []entity.BranchRefDraftEntity

	if branchDetails != nil {
		branchEnt.OriginalConfig, err = json.Marshal(view.TransformBranchToGitView(*branchDetails))
		if err != nil {
			return err
		}

		contentEnts, err = b.getBranchContentFromRepositoy(ctx, branchDetails, projectId, branchName, currentCommit)
		if err != nil {
			return err
		}

		for _, ref := range branchDetails.Refs {
			refEnts = append(refEnts, *entity.MakeRefEntity(&ref, projectId, branchName, string(view.StatusUnmodified)))
		}
	}

	err = b.draftRepo.CreateBranchDraft(branchEnt, contentEnts, refEnts)
	log.Infof("[PERF] Create branch draft took %dms", time.Since(start).Milliseconds())
	return err
}

// getBranchContentFromRepositoy loads the content of every file listed in the
// branch config. For small configs it fetches files one by one; for larger
// ones it is cheaper to download the whole repository as an archive and read
// the files from it.
func (b branchServiceImpl) getBranchContentFromRepositoy(ctx goctx.Context, branchDetails *view.Branch, projectId string, branchName string, currentCommit string) ([]*entity.ContentDraftEntity, error) {
	if len(branchDetails.Files) < 10 {
		return b.getBranchContentFromRepositoyFiles(ctx, branchDetails, projectId, branchName, currentCommit)
	}
	return b.getBranchContentFromRepositoyArchive(ctx, branchDetails, projectId, branchName, currentCommit)
}

func (b branchServiceImpl) getBranchContentFromRepositoyFiles(ctx goctx.Context, branchDetails *view.Branch, projectId string, branchName string, currentCommit 
string) ([]*entity.ContentDraftEntity, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("getBranchContentFromRepositoyFiles(%s,%s,%+v,%s)", projectId, branchName, branchDetails, currentCommit)) + + contentEnts := make([]*entity.ContentDraftEntity, len(branchDetails.Files)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + eg := errgroup.Group{} + for index := range branchDetails.Files { + content := &branchDetails.Files[index] + i := index + if !content.IsFolder { + eg.Go(func() error { + fileData, _, blobId, err := gitClient.GetFileContent(ctx, project.Integration.RepositoryId, currentCommit, content.FileId) + if err != nil { + return err + } + content.BlobId = blobId + var preparedData []byte + if strings.Contains(getMediaType(fileData), "text/plain") { + preparedData = convertEol(fileData) + } else { + preparedData = fileData + } + ent := entity.MakeContentEntity(content, i, projectId, branchName, preparedData, getMediaType(fileData), string(view.StatusUnmodified)) + contentEnts[i] = ent + return nil + }) + } else { + ent := entity.MakeContentEntity(content, i, projectId, branchName, nil, "text/plain", string(view.StatusUnmodified)) + contentEnts[i] = ent + } + } + + err = eg.Wait() + if err != nil { + return nil, fmt.Errorf("failed to get content from git repository: %w", err) + } + return contentEnts, nil +} + +func (b branchServiceImpl) getBranchContentFromRepositoyArchive(ctx goctx.Context, branchDetails *view.Branch, projectId string, branchName string, currentCommit string) ([]*entity.ContentDraftEntity, error) { + ctx = context.CreateContextWithStacktrace(ctx, 
fmt.Sprintf("getBranchContentFromRepositoyArchive(%s,%s,%+v,%s)", projectId, branchName, branchDetails, currentCommit))

	contentEnts := make([]*entity.ContentDraftEntity, len(branchDetails.Files))
	secCtx := context.GetSecurityContext(ctx)
	if secCtx == nil {
		return nil, fmt.Errorf("security context not found")
	}
	project, err := b.projectService.GetProject(*secCtx, projectId)
	if err != nil {
		return nil, err
	}

	gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId())
	if err != nil {
		return nil, fmt.Errorf("failed to get git client: %v", err)
	}

	tempFolder := "tmp"
	tempFilePath := fmt.Sprintf("%v/%v_@@_%v_@@_%v.zip", tempFolder, project.Integration.RepositoryId, slug.Make(branchName), time.Now().UnixMilli())

	err = os.MkdirAll(tempFolder, 0777)
	if err != nil {
		return nil, err
	}
	gitRepoFile, err := os.Create(tempFilePath)
	if err != nil {
		return nil, err
	}
	// Register cleanup immediately after a successful Create: the original
	// deferred Close only after WriteCommitArchive succeeded, leaking the file
	// handle on that error path. Deferred calls run LIFO, so Close still
	// happens before Remove.
	defer os.Remove(tempFilePath)
	defer gitRepoFile.Close()
	err = gitClient.WriteCommitArchive(ctx, project.Integration.RepositoryId, currentCommit, gitRepoFile, "zip")
	if err != nil {
		return nil, err
	}

	zipReader, err := zip.OpenReader(tempFilePath)
	if err != nil {
		return nil, err
	}
	defer zipReader.Close()

	zipFileHeaders := make(map[string]*zip.File)
	for _, file := range zipReader.File {
		if file.FileInfo().IsDir() {
			continue
		}
		if strings.Contains(file.Name, "/") {
			// strip the leading git-generated folder that doesn't exist in the repository
			filename := strings.SplitN(file.Name, "/", 2)[1]
			zipFileHeaders[filename] = file
		}
	}

	eg := errgroup.Group{}
	for index := range branchDetails.Files {
		content := &branchDetails.Files[index]
		i := index
		if !content.IsFolder {
			eg.Go(func() error {
				var fileData []byte
				if fileHeader, exists := zipFileHeaders[content.FileId]; !exists {
					// branch config references a file that doesn't exist in git
					fileData = []byte{}
					content.BlobId = ""
				} else {
					// Use a goroutine-local error: the original assigned the
					// enclosing function's err from every goroutine, which is a
					// data race across the errgroup workers.
					data, readErr := archive.ReadZipFile(fileHeader)
					if readErr != nil {
						return readErr
					}
					fileData = data
					content.BlobId = calculateGitBlobId(fileData)
				}

				var preparedData []byte
				if strings.Contains(getMediaType(fileData), "text/plain") {
					preparedData = convertEol(fileData)
				} else {
					preparedData = fileData
				}
				contentEnts[i] = entity.MakeContentEntity(content, i, projectId, branchName, preparedData, getMediaType(fileData), string(view.StatusUnmodified))
				return nil
			})
		} else {
			contentEnts[i] = entity.MakeContentEntity(content, i, projectId, branchName, nil, "text/plain", string(view.StatusUnmodified))
		}
	}

	if err = eg.Wait(); err != nil {
		return nil, fmt.Errorf("failed to get content from git repository archive: %w", err)
	}
	return contentEnts, nil
}

// GitBlobIdForEmptyFile is the well-known git blob sha1 of a zero-length file.
const GitBlobIdForEmptyFile = "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"

// calculateGitBlobId computes the git blob object id of the given bytes:
// sha1 over the header "blob <len>\x00" followed by the content itself.
func calculateGitBlobId(s []byte) string {
	h := sha1.New()
	fmt.Fprintf(h, "blob %d\x00", len(s))
	h.Write(s)
	return hex.EncodeToString(h.Sum(nil))
}

// expandBranchFolders resolves folder entries in the branch config into the
// individual files they contain, marking files discovered this way with
// FromFolder so they can be distinguished from explicitly configured ones.
func (b *branchServiceImpl) expandBranchFolders(ctx goctx.Context, projectId string, branchName string, branchDetails *view.Branch) error {
	ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("expandBranchFolders(%s,%s,%+v)", projectId, branchName, branchDetails))
	secCtx := context.GetSecurityContext(ctx)
	if secCtx == nil {
		return fmt.Errorf("security context not found")
	}

	project, err := b.projectService.GetProject(*secCtx, projectId)
	if err != nil {
		return err
	}
	gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId())
	if err != nil {
		return fmt.Errorf("failed to get git client: %v", err)
	}
	configFiles := make(map[string]*view.Content, 0)
	configFolders := make([]*view.Content, 0)
	for index := range branchDetails.Files {
		content := &branchDetails.Files[index]
		if 
strings.HasSuffix(content.FileId, "/") { + content.IsFolder = true + content.Publish = false + configFolders = append(configFolders, content) + continue + } + configFiles[content.FileId] = content + } + filesFromFolders := make([]view.Content, 0) + for _, folder := range configFolders { + folderFileIds, err := gitClient.ListDirectoryFilesRecursive(ctx, project.Integration.RepositoryId, branchName, folder.FileId) + if err != nil { + return err //todo custom error + } + for _, fileId := range folderFileIds { + fileId := utils.NormalizeFileId(fileId) + if configFiles[fileId] != nil { + if !configFiles[fileId].Publish && len(configFiles[fileId].Labels) == 0 { + configFiles[fileId].FromFolder = true + } + continue + } + filePath, fileName := utils.SplitFileId(fileId) + file := view.Content{ + FileId: fileId, + Name: fileName, + Type: view.Unknown, + Path: filePath, + Publish: false, + Status: view.StatusUnmodified, + Labels: []string{}, + FromFolder: true, + } + filesFromFolders = append(filesFromFolders, file) + configFiles[fileId] = &file + } + } + branchDetails.Files = append(branchDetails.Files, filesFromFolders...) 
+ return nil +} + +func (b *branchServiceImpl) GetBranchDetails(ctx goctx.Context, projectId string, branchName string) (*view.Branch, error) { + exists, err := b.DraftExists(projectId, branchName) + if err != nil { + return nil, err + } + + var branchDetails *view.Branch + if exists { + branchDetails, err = b.GetBranchDetailsFromDraft(ctx, projectId, branchName, false) + } else { + branchDetails, _, err = b.GetBranchDetailsFromGit(ctx, projectId, branchName) + } + if branchDetails == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ConfigNotFound, + Message: exception.ConfigNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + + return branchDetails, err +} + +func (b *branchServiceImpl) GetBranchDetailsEP(ctx goctx.Context, projectId string, branchName string, createDraft bool) (*view.Branch, error) { + draftExists, err := b.DraftExists(projectId, branchName) + if err != nil { + return nil, err + } + branchExists, canPush, err := b.BranchExists(ctx, projectId, branchName) + if err != nil { + return nil, err + } + if !branchExists { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"branch": branchName, "projectId": projectId}, + } + } + + var branchDetails *view.Branch + var currentCommit string + if draftExists { + branchDetails, err = b.GetBranchDetailsFromDraft(ctx, projectId, branchName, false) + } else { + branchDetails, currentCommit, err = b.GetBranchDetailsFromGit(ctx, projectId, branchName) + } + if err != nil { + return nil, err + } + if !draftExists && createDraft { + err = b.createBranchDraft(ctx, projectId, branchName, branchDetails, currentCommit) + if err != nil { + return nil, err + } + } + if branchDetails == nil { + branchDetails = &view.Branch{ + ProjectId: projectId, + Files: make([]view.Content, 0), + Refs: make([]view.Ref, 
0), + } + } + if draftExists { + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + return nil, err + } + branchDetails.ChangeType = view.ChangeType(branchDraft.ChangeType) + } else { + branchDetails.ChangeType = view.CTUnchanged + } + setFileChangeTypes(branchDetails) + editors, err := b.branchEditorsService.GetBranchEditors(projectId, branchName) + if err != nil { + return nil, err + } + branchDetails.Editors = editors + branchDetails.ConfigFileId = getApihubConfigFileId(projectId) + permissions := make([]string, 0) + if canPush { + permissions = append(permissions, "all") + } + branchDetails.Permissions = &permissions + + return branchDetails, nil +} + +func (b *branchServiceImpl) RecalculateDraftConfigChangeType(ctx goctx.Context, projectId string, branchName string) error { + err := b.RecalculateDraftConfigFolders(ctx, projectId, branchName) + if err != nil { + return err + } + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + b.wsBranchService.DisconnectClients(projectId, branchName) + return err + } + if branchDraft == nil { + return nil + } + calculatedChangeType, err := b.calculateDraftConfigChangeType(ctx, projectId, branchName, false) + if err != nil { + b.wsBranchService.DisconnectClients(projectId, branchName) + return err + } + if branchDraft.ChangeType != string(calculatedChangeType) { + err = b.branchRepository.SetChangeType(projectId, branchName, string(calculatedChangeType)) + if err != nil { + b.wsBranchService.DisconnectClients(projectId, branchName) + return err + } + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchConfigUpdatedPatch{ + Type: websocket.BranchConfigUpdatedType, + Data: websocket.BranchConfigUpdatedPatchData{ChangeType: calculatedChangeType}, + }) + } + return nil +} + +func (b *branchServiceImpl) RecalculateDraftConfigFolders(ctx goctx.Context, projectId string, branchName string) error { + 
branchDraft, err := b.GetBranchDetailsFromDraft(ctx, projectId, branchName, false) + if err != nil { + return err + } + folders := make([]string, 0) + files := make([]*view.Content, 0) + excludedFiles := make([]string, 0) + filesToMoveInFolder := make(map[string]bool, 0) + filesToDelete := make(map[string]bool, 0) + for index, file := range branchDraft.Files { + if file.IsFolder { + folders = append(folders, file.FileId) + } + if file.Status == view.StatusExcluded { + excludedFiles = append(excludedFiles, file.FileId) + } + if file.FromFolder || file.Publish || len(file.Labels) > 0 { + continue + } + files = append(files, &branchDraft.Files[index]) + } + for _, folder := range folders { + for _, file := range files { + if strings.HasPrefix(file.FileId, folder) && file.FileId != folder { + if file.IsFolder { + filesToDelete[file.FileId] = true + } else { + filesToMoveInFolder[file.FileId] = true + } + } + } + } + + fileIdsToMoveInFolder := make([]string, 0) + fileIdsToMoveFromFolder := make([]string, 0) + fileIdsToDelete := make([]string, 0) + for fileToUpdate := range filesToMoveInFolder { + fileIdsToMoveInFolder = append(fileIdsToMoveInFolder, fileToUpdate) + } + for _, excludedFileId := range excludedFiles { + folderForExcludedFile := findFolderForFile(excludedFileId, branchDraft.Files) + if folderForExcludedFile == "" { + continue + } + fileIdsToMoveFromFolder = append(fileIdsToMoveFromFolder, findAllFilesForFolder(folderForExcludedFile, branchDraft.Files)...) 
+ filesToDelete[folderForExcludedFile] = true + } + for fileToDelete := range filesToDelete { + fileIdsToDelete = append(fileIdsToDelete, fileToDelete) + } + + err = b.draftRepo.UpdateFolderContents(projectId, branchName, fileIdsToDelete, fileIdsToMoveInFolder, fileIdsToMoveFromFolder) + return err +} + +func (b *branchServiceImpl) UpdateDraftConfigChangeType(ctx goctx.Context, projectId string, branchName string, changeType view.ChangeType) error { + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + return err + } + if branchDraft.ChangeType != string(changeType) { + err = b.branchRepository.SetChangeType(projectId, branchName, string(changeType)) + if err != nil { + return err + } + } + return nil +} + +func (b *branchServiceImpl) calculateDraftConfigChangeType(ctx goctx.Context, projectId string, branchName string, allowBrokenRefs bool) (view.ChangeType, error) { + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + return "", err + } + if branchDraft != nil { + if len(branchDraft.OriginalConfig) == 0 { + return view.CTAdded, nil + } + + branchDetails, err := b.GetBranchDetailsFromDraft(ctx, projectId, branchName, allowBrokenRefs) + if err != nil { + return "", err + } + var originalBranchDetails view.Branch + err = json.Unmarshal(branchDraft.OriginalConfig, &originalBranchDetails) + if err != nil { + return "", err + } + if draftConfigChanged(branchDetails, &originalBranchDetails) { + return view.CTUpdated, nil + } + } + return view.CTUnchanged, nil +} + +func setFileChangeTypes(branch *view.Branch) { + for i, file := range branch.Files { + switch file.Status { + case view.StatusAdded: + branch.Files[i].ChangeType = view.CTAdded + case view.StatusModified, view.StatusMoved: + branch.Files[i].ChangeType = view.CTUpdated + case view.StatusDeleted: + branch.Files[i].ChangeType = view.CTDeleted + default: + branch.Files[i].ChangeType = view.CTUnchanged + } + } +} + +func 
draftConfigChanged(draftBranch *view.Branch, gitBranch *view.Branch) bool { + if (draftBranch == nil) != (gitBranch == nil) { + return true + } + if draftBranch == nil { + return false + } + + filesFromGitConfig := map[string]view.Content{} + for _, file := range gitBranch.Files { + filesFromGitConfig[file.FileId] = file + } + + for _, draftFile := range draftBranch.Files { + gitFile, exists := filesFromGitConfig[draftFile.FileId] + deletedFromConfig := draftFile.Status == view.StatusExcluded || draftFile.Status == view.StatusDeleted + if draftFile.FromFolder { + if exists { + return true + } + continue + } + if deletedFromConfig && exists { + return true + } + if deletedFromConfig && !exists { + continue + } + if !exists { + return true + } + if !draftFile.EqualsGitView(&gitFile) { + return true + } + } + + refsFromGitConfig := map[string]view.Ref{} + for _, ref := range gitBranch.Refs { + if ref.IsBroken { + continue + } + refsFromGitConfig[ref.RefPackageId+ref.RefPackageVersion] = ref + } + + for _, draftRef := range draftBranch.Refs { + if draftRef.IsBroken { + continue + } + gitRef, exists := refsFromGitConfig[draftRef.RefPackageId+draftRef.RefPackageVersion] + deletedFromConfig := draftRef.Status == view.StatusDeleted + if deletedFromConfig && exists { + return true + } + if deletedFromConfig && !exists { + continue + } + if !exists { + return true + } + if !draftRef.EqualsGitView(&gitRef) { + return true + } + } + + return false +} + +func (b *branchServiceImpl) GetBranchDetailsFromDraft(ctx goctx.Context, projectId string, branchName string, allowBrokenRefs bool) (*view.Branch, error) { + result := view.Branch{ + ProjectId: projectId, + Files: make([]view.Content, 0), + Refs: make([]view.Ref, 0), + } + + contents, err := b.draftRepo.GetContents(projectId, branchName) + if err != nil { + return nil, err + } + for _, content := range contents { + result.Files = append(result.Files, *entity.MakeContentView(&content)) + } + + refs, err := 
b.draftRepo.GetRefs(projectId, branchName) + if err != nil { + return nil, err + } + var refIsBroken bool + for _, ref := range refs { + refIsBroken = false + packageEnt, err := b.publishedRepo.GetPackage(ref.RefPackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + if allowBrokenRefs { + refIsBroken = true + } else { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": ref.RefPackageId}, + } + } + } + + version, err := b.publishedRepo.GetVersion(ref.RefPackageId, ref.RefVersion) + if err != nil { + return nil, err + } + if version == nil { + if allowBrokenRefs { + refIsBroken = true + } else { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ref.RefPackageId, "version": ref.RefVersion}, + } + } + } + packageName, versionStatus, packageKind := "unknown", "unknown", "unknown" + if packageEnt != nil { + packageName = packageEnt.Name + packageKind = packageEnt.Kind + } + if version != nil { + versionStatus = version.Status + } + rv := entity.MakeRefView(&ref, packageName, versionStatus, packageKind, refIsBroken) + + result.Refs = append(result.Refs, *rv) + } + + return &result, nil +} + +func (b *branchServiceImpl) GetBranchRawConfigFromDraft(ctx goctx.Context, projectId string, branchName string) ([]byte, error) { + result := view.Branch{ + ProjectId: projectId, + Files: make([]view.Content, 0), + Refs: make([]view.Ref, 0), + } + draftExists, err := b.DraftExists(projectId, branchName) + if err != nil { + return nil, err + } + if !draftExists { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchDraftNotFound, + Message: exception.BranchDraftNotFoundMsg, + Params: 
map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + contents, err := b.draftRepo.GetContents(projectId, branchName) + if err != nil { + return nil, err + } + for _, content := range contents { + if content.Status == string(view.StatusExcluded) || content.Status == string(view.StatusDeleted) { + continue + } + result.Files = append(result.Files, *entity.MakeContentView(&content)) + } + + refs, err := b.draftRepo.GetRefs(projectId, branchName) + if err != nil { + return nil, err + } + for _, ref := range refs { + if ref.Status == string(view.StatusDeleted) { + continue + } + packageEnt, err := b.publishedRepo.GetPackage(ref.RefPackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": ref.RefPackageId}, + } + } + + version, err := b.publishedRepo.GetVersion(ref.RefPackageId, ref.RefVersion) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ref.RefPackageId, "version": ref.RefVersion}, + } + } + + rv := entity.MakeRefView(&ref, packageEnt.Name, version.Status, packageEnt.Kind, false) + + result.Refs = append(result.Refs, *rv) + } + return getApihubConfigRaw(view.TransformBranchToGitView(result)) +} + +func (b *branchServiceImpl) GetBranchDetailsFromGit(ctx goctx.Context, projectId string, branchName string) (*view.Branch, string, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetBranchDetailsFromGit(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, "", fmt.Errorf("security context not found") + } + + 
project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, "", err + } + + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, "", fmt.Errorf("failed to get git client: %v", err) + } + + lastBranchCommit, err := gitClient.GetBranchOrTagLastCommitId(ctx, project.Integration.RepositoryId, branchName) + if err != nil || lastBranchCommit == "" { + branchExists, _, exErr := gitClient.BranchOrTagExists(ctx, project.Integration.RepositoryId, branchName) + if exErr != nil { + return nil, "", fmt.Errorf("failed to get last branch commit: %s", exErr) + } + if !branchExists { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + return nil, "", fmt.Errorf("failed to get last branch commit: %s", err) + } + + result, err := b.getBranchDetailsFromGitCommit(ctx, project, branchName, lastBranchCommit) + if err != nil { + return nil, "", err + } + return result, lastBranchCommit, nil +} + +func (b *branchServiceImpl) GetBranchDetailsFromGitCommit(ctx goctx.Context, projectId string, branchName string, commitId string) (*view.Branch, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetBranchDetailsFromGitCommit(%s,%s,%s)", projectId, branchName, commitId)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + + return b.getBranchDetailsFromGitCommit(ctx, project, branchName, commitId) +} + +func (b *branchServiceImpl) getBranchDetailsFromGitCommit(ctx goctx.Context, project *view.Project, branchName string, commitId string) (*view.Branch, error) { + secCtx := 
context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + var gitBranch *view.BranchGitConfigView + + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + apiHubConfigPath := ApiHubBaseConfigPath + project.Id + ".json" + + configExists, err := gitClient.FileExists(ctx, project.Integration.RepositoryId, commitId, apiHubConfigPath) + if err != nil { + return nil, fmt.Errorf("failed to check if apihub config file exists: %v", err) + } + if !configExists { + return nil, nil + } + + data, _, _, err := gitClient.GetFileContent(ctx, project.Integration.RepositoryId, commitId, apiHubConfigPath) + if err != nil { + return nil, err + } + if data == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, //todo maybe 404? + Code: exception.ConfigNotFound, + Message: exception.ConfigNotFoundMsg, + Params: map[string]interface{}{"projectId": project.Id, "branch": branchName}, + } + } + err = json.Unmarshal(data, &gitBranch) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidApihubConfig, + Message: exception.InvalidApihubConfigMsg, + Debug: err.Error(), + } + } + + err = b.validateGitBranchConfig(gitBranch, apiHubConfigPath) + if err != nil { + return nil, err + } + + var resRefs []view.Ref + if gitBranch.Refs == nil { + resRefs = make([]view.Ref, 0) + } else { + for _, ref := range gitBranch.Refs { + packageEnt, err := b.publishedRepo.GetPackage(ref.RefPackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": project.Id}, + } + } + + version, err := 
b.publishedRepo.GetVersion(ref.RefPackageId, ref.Version) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": project.Id, "version": ref.Version}, + } + } + resRefs = append(resRefs, view.TransformGitViewToRef(ref, packageEnt.Name, version.Status, entity.KIND_PACKAGE)) + } + } + + branchDetails := view.TransformGitToBranchView(gitBranch, resRefs) + err = b.expandBranchFolders(ctx, project.Id, commitId, branchDetails) + if err != nil { + return nil, err + } + return branchDetails, nil +} + +func (b *branchServiceImpl) GetBranchRawConfigFromGit(ctx goctx.Context, projectId string, branchName string) ([]byte, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetBranchDetailsFromGit(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + gitBranch := &view.BranchGitConfigView{ + ProjectId: projectId, + Files: make([]view.ContentGitConfigView, 0), + Refs: make([]view.RefGitConfigView, 0), + } + + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + apiHubConfigPath := getApihubConfigFileId(projectId) + + configExists, err := gitClient.FileExists(ctx, project.Integration.RepositoryId, branchName, apiHubConfigPath) + if err != nil { + return nil, fmt.Errorf("failed to check if apihub config file exists: %v", err) + } + if !configExists { + return getApihubConfigRaw(gitBranch) + } + + data, _, _, err := gitClient.GetFileContent(ctx, project.Integration.RepositoryId, branchName, 
apiHubConfigPath) + if err != nil { + return nil, err + } + if data == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, //todo maybe 404? + Code: exception.ConfigNotFound, + Message: exception.ConfigNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + err = json.Unmarshal(data, &gitBranch) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidApihubConfig, + Message: exception.InvalidApihubConfigMsg, + Debug: err.Error(), + } + } + + return getApihubConfigRaw(gitBranch) +} + +func (b *branchServiceImpl) GetContentNoData(ctx goctx.Context, projectId string, branchName string, contentId string) (*view.Content, error) { + content, err := b.draftRepo.GetContent(projectId, branchName, contentId) + if err != nil { + return nil, err + } + if content != nil { + return entity.MakeContentView(content), nil + } else { + branchDetails, _, err := b.GetBranchDetailsFromGit(ctx, projectId, branchName) + if err != nil { + return nil, err + } + if branchDetails == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ConfigNotFound, + Message: exception.ConfigNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + branchDetails.RemoveFolders() + for _, content := range branchDetails.Files { + if content.FileId == contentId { + return &content, nil + } + } + } + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentIdNotFound, + Message: exception.ContentIdNotFoundMsg, + Params: map[string]interface{}{"contentId": contentId, "branch": branchName, "projectId": projectId}, + } +} + +func (b *branchServiceImpl) DraftExists(projectId string, branchName string) (bool, error) { + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + return false, err + } + return branchDraft != nil, nil 
+} + +func (b *branchServiceImpl) getBranchDraftCommitId(projectId string, branchName string) (string, error) { + branchDraft, err := b.branchRepository.GetBranchDraft(projectId, branchName) + if err != nil { + return "", err + } + if branchDraft == nil { + return "", nil + } + return branchDraft.CommitId, nil +} + +func (b *branchServiceImpl) DraftContainsChanges(ctx goctx.Context, projectId string, branchName string) (bool, error) { + configChangeType, err := b.calculateDraftConfigChangeType(ctx, projectId, branchName, true) + if err != nil { + return false, err + } + if configChangeType != view.CTUnchanged { + return true, nil + } + + contents, err := b.draftRepo.GetContents(projectId, branchName) + if err != nil { + return false, err + } + + files := make([]view.Content, 0) + for _, content := range contents { + files = append(files, *entity.MakeContentView(&content)) + } + + for _, file := range files { + if file.Status != view.StatusUnmodified { + return true, nil + } + } + return false, nil +} + +func (b branchServiceImpl) BranchExists(ctx goctx.Context, projectId string, branchName string) (bool, bool, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("BranchExists(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return false, false, fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return false, false, err + } + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return false, false, err + } + exists, canPush, err := gitClient.BranchOrTagExists(ctx, project.Integration.RepositoryId, branchName) + if err != nil { + return false, false, err + } + return exists, canPush, nil +} + +func (b branchServiceImpl) CloneBranch(ctx goctx.Context, projectId string, branchName string, newBranchName string) error { + ctx = 
context.CreateContextWithStacktrace(ctx, fmt.Sprintf("CloneBranch(%s,%s,%s)", projectId, branchName, newBranchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return err + } + newBranchExists, _, err := b.BranchExists(ctx, projectId, newBranchName) + if err != nil { + return err + } + if newBranchExists { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BranchAlreadyExists, + Message: exception.BranchAlreadyExistsMsg, + Params: map[string]interface{}{"branch": newBranchName, "projectId": projectId}, + } + } + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return fmt.Errorf("failed to get git client: %v", err) + } + return gitClient.CloneBranch(ctx, project.Integration.RepositoryId, branchName, newBranchName) +} + +func (b branchServiceImpl) CreateMergeRequest(ctx goctx.Context, projectId string, targetBranchName string, sourceBranchName string, title string, description string) (string, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("CreateMergeRequest(%s,%s,%s)", projectId, targetBranchName, sourceBranchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return "", fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return "", err + } + targetBranchExists, _, err := b.BranchExists(ctx, projectId, targetBranchName) + if err != nil { + return "", err + } + if !targetBranchExists { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"branch": targetBranchName, "projectId": projectId}, + } + } + sourceBranchExists, _, err := b.BranchExists(ctx, projectId, 
sourceBranchName) + if err != nil { + return "", err + } + if !sourceBranchExists { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"branch": sourceBranchName, "projectId": projectId}, + } + } + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return "", fmt.Errorf("failed to get git client: %v", err) + } + return gitClient.CreateMergeRequest(ctx, project.Integration.RepositoryId, targetBranchName, sourceBranchName, title, description) +} + +func (b branchServiceImpl) DeleteBranch(ctx goctx.Context, projectId string, branchName string) error { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("DeleteBranch(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return err + } + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return fmt.Errorf("failed to get git client: %v", err) + } + return gitClient.DeleteBranch(ctx, project.Integration.RepositoryId, branchName) +} + +func (b branchServiceImpl) ResetBranchDraft(ctx goctx.Context, projectId string, branchName string, sendResetNotification bool) error { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("ResetBranchDraft(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return fmt.Errorf("security context not found") + } + + draftExists, err := b.DraftExists(projectId, branchName) + if err != nil { + return err + } + if !draftExists { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchDraftNotFound, + Message: exception.BranchDraftNotFoundMsg, + Params: 
map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + err = b.DeleteBranchDraft(projectId, branchName) + if err != nil { + return err + } + if sendResetNotification { + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchResetPatch{ + Type: websocket.BranchResetType, + UserId: (*secCtx).GetUserId(), + }) + } + + if b.wsBranchService.HasActiveEditSession(projectId, branchName) { + branch, err := b.GetBranchDetailsEP(ctx, projectId, branchName, true) + if err != nil { + b.wsBranchService.DisconnectClients(projectId, branchName) + return err + } + branch.RemoveFolders() + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchConfigSnapshot{ + Type: websocket.BranchConfigSnapshotType, + Data: branch, + }) + } + return nil +} + +func (b branchServiceImpl) DeleteBranchDraft(projectId string, branchName string) error { + //todo do it in one transaction + err := b.draftRepo.DeleteBranchDraft(projectId, branchName) + if err != nil { + return err + } + err = b.branchEditorsService.RemoveBranchEditors(projectId, branchName) + if err != nil { + return err + } + return nil +} + +func (b branchServiceImpl) CalculateBranchConflicts(ctx goctx.Context, projectId string, branchName string) (*view.BranchConflicts, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("CalculateBranchConflicts(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + + filesWithConflict := make([]string, 0) + draftExists, err := b.DraftExists(projectId, branchName) + if err != nil { + return nil, err + } + if !draftExists { + branchExists, _, err := b.BranchExists(ctx, projectId, branchName) + if err != nil { + return nil, err + } + if !branchExists { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + 
Params: map[string]interface{}{"branch": branchName, "projectId": projectId}, + } + } + return &view.BranchConflicts{Files: make([]string, 0)}, nil + } + draftBranch, err := b.GetBranchDetailsFromDraft(ctx, projectId, branchName, false) + if err != nil { + return nil, err + } + draftBranch.RemoveFolders() + if !draftExists || len(draftBranch.Files) == 0 { + return &view.BranchConflicts{Files: filesWithConflict}, nil + } + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + conflictChanges := make([]view.FileConflict, 0) + for _, file := range draftBranch.Files { + switch file.Status { + case view.StatusAdded: + { + blobId, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.FileId) + if err != nil { + return nil, err + } + if blobId != "" { + filesWithConflict = append(filesWithConflict, file.FileId) + } + if blobId != file.ConflictedBlobId { + conflictChanges = append(conflictChanges, view.FileConflict{FileId: file.FileId, ConflictedBlobId: blobId}) + } + } + case view.StatusDeleted: + { + if file.BlobId != "" { + blobId, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.FileId) + if err != nil { + return nil, err + } + if blobId != file.BlobId { + filesWithConflict = append(filesWithConflict, file.FileId) + } + if file.ConflictedBlobId != blobId && blobId != file.BlobId { + conflictChanges = append(conflictChanges, view.FileConflict{FileId: file.FileId, ConflictedBlobId: blobId}) + } + } + if file.BlobId == "" && file.ConflictedBlobId != "" { + //file marked to delete no longer exists in git //todo probably never happening + conflictChanges = append(conflictChanges, view.FileConflict{FileId: file.FileId, ConflictedBlobId: ""}) + } + } + case 
view.StatusModified: + { + if file.MovedFrom == "" { + blobId, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.FileId) + if err != nil { + return nil, err + } + if blobId != file.BlobId { + filesWithConflict = append(filesWithConflict, file.FileId) + } + if file.ConflictedBlobId != "" && blobId == file.BlobId { + conflictChanges = append(conflictChanges, view.FileConflict{FileId: file.FileId, ConflictedBlobId: ""}) + } + if file.ConflictedBlobId != blobId && blobId != file.BlobId { + conflictChanges = append(conflictChanges, view.FileConflict{FileId: file.FileId, ConflictedBlobId: blobId}) + } + } else { + fileConflict := view.FileConflict{} + hasConflict := false + + blobIdOld, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.MovedFrom) + if err != nil { + return nil, err + } + if blobIdOld != file.BlobId { + hasConflict = true + } + if file.ConflictedBlobId != "" && blobIdOld == file.BlobId { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = "" + } + if file.ConflictedBlobId != blobIdOld && blobIdOld != file.BlobId { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = blobIdOld + } + + blobIdNew, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.FileId) + if err != nil { + return nil, err + } + if blobIdNew != "" { + hasConflict = true + } + if file.ConflictedFileId != "" && blobIdNew == "" { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = "" + emptyStr := "" + fileConflict.ConflictedFileId = &emptyStr + } + if file.ConflictedBlobId != blobIdNew && blobIdNew != "" { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = blobIdNew + fileConflict.ConflictedFileId = &file.FileId + } + + if hasConflict { + filesWithConflict = append(filesWithConflict, file.FileId) + } + if fileConflict.FileId != "" { + conflictChanges = append(conflictChanges, fileConflict) + } + } + } + case 
view.StatusMoved: + { + fileConflict := view.FileConflict{} + blobIdNew, err := gitClient.GetFileBlobId(ctx, project.Integration.RepositoryId, branchName, file.FileId) + if err != nil { + return nil, err + } + if blobIdNew != "" { + filesWithConflict = append(filesWithConflict, file.FileId) + } + if file.ConflictedFileId != "" && blobIdNew == "" { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = "" + emptyStr := "" + fileConflict.ConflictedFileId = &emptyStr + } + if file.ConflictedBlobId != blobIdNew && blobIdNew != "" { + fileConflict.FileId = file.FileId + fileConflict.ConflictedBlobId = blobIdNew + fileConflict.ConflictedFileId = &file.FileId + } + if fileConflict.FileId != "" { + conflictChanges = append(conflictChanges, fileConflict) + } + } + case view.StatusUnmodified: + continue + } + } + err = b.draftRepo.UpdateContentsConflicts(projectId, branchName, conflictChanges) + if err != nil { + return nil, err + } + for _, conflictChange := range conflictChanges { + b.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchFilesUpdatedPatch{ + Type: websocket.BranchFilesUpdatedType, + UserId: (*secCtx).GetUserId(), + FileId: conflictChange.FileId, + Operation: "patch", + Data: &websocket.BranchFilesUpdatedPatchData{ + ConflictedBlobId: &conflictChange.ConflictedBlobId, + ConflictedFileId: conflictChange.ConflictedFileId, + }, + }) + } + return &view.BranchConflicts{Files: filesWithConflict}, nil +} + +func (b *branchServiceImpl) validateGitBranchConfig(gitBranchConfig *view.BranchGitConfigView, apiHubConfigPath string) error { + // check duplicate file entries + + fileIds := map[string]bool{} + duplicateIds := map[string]bool{} + + for _, fileEntry := range gitBranchConfig.Files { + _, exists := fileIds[fileEntry.FileId] + if !exists { + fileIds[fileEntry.FileId] = true + } else { + duplicateIds[fileEntry.FileId] = true + } + } + if len(duplicateIds) > 0 { + var duplicates []string + for id := range duplicateIds { + 
duplicates = append(duplicates, id) + } + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.GitBranchConfigContainDuplicateFiles, + Message: exception.GitBranchConfigContainDuplicateFilesMsg, + Params: map[string]interface{}{"path": apiHubConfigPath, "files": fmt.Sprintf("%+q", duplicates)}, + } + } + return nil + + // TODO: add more validations? +} + +func (b *branchServiceImpl) ConnectToWebsocket(ctx goctx.Context, projectId string, branchName string, wsId string, connection *ws.Conn) error { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("ConnectToWebsocket(%s,%s)", projectId, branchName)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return fmt.Errorf("security context not found") + } + + err := b.wsBranchService.ConnectToProjectBranch(*secCtx, projectId, branchName, wsId, connection) + if err != nil { + return err + } + branch, err := b.GetBranchDetailsEP(ctx, projectId, branchName, true) + if err != nil { + b.wsBranchService.DisconnectClient(projectId, branchName, wsId) + return err + } + branch.RemoveFolders() + b.wsBranchService.NotifyProjectBranchUser(projectId, branchName, wsId, + websocket.BranchConfigSnapshot{ + Type: websocket.BranchConfigSnapshotType, + Data: branch, + }) + + return nil +} + +func (b *branchServiceImpl) GetAllZippedContentFromGitCommit(ctx goctx.Context, branchDetails *view.Branch, projectId string, branchName string, commitId string) ([]byte, error) { + zipBuf := bytes.Buffer{} + zw := zip.NewWriter(&zipBuf) + + contentEntities, err := b.getBranchContentFromRepositoy(ctx, branchDetails, projectId, branchName, commitId) + if err != nil { + return nil, err + } + + for _, contentEntity := range contentEntities { + mdFw, err := zw.Create(contentEntity.FileId) + if err != nil { + return nil, err + } + if len(contentEntity.Data) == 0 { + continue + } + _, err = mdFw.Write(contentEntity.Data) + if err != nil { + return nil, err + } + } + + err = zw.Close() + if err != nil { + 
return nil, err + } + + return zipBuf.Bytes(), nil +} + +func (b *branchServiceImpl) GetVersionPublishDetailsFromGitCommit(ctx goctx.Context, projectId string, branchName string, commitId string) (*view.GitVersionPublish, error) { + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("GetVersionPublishDetailsFromGitCommit(%s,%s,%s)", projectId, branchName, commitId)) + secCtx := context.GetSecurityContext(ctx) + if secCtx == nil { + return nil, fmt.Errorf("security context not found") + } + + project, err := b.projectService.GetProject(*secCtx, projectId) + if err != nil { + return nil, err + } + + var gitVersion view.GitVersionPublish + gitClient, err := b.gitClientProvider.GetUserClient(project.Integration.Type, (*secCtx).GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + publishDetailsPath := getApihubVersionPublishFileId(project.Id) + + configExists, err := gitClient.FileExists(ctx, project.Integration.RepositoryId, commitId, publishDetailsPath) + if err != nil { + return nil, fmt.Errorf("failed to check if apihub version publish file exists: %v", err) + } + if !configExists { + return nil, nil + } + + data, _, _, err := gitClient.GetFileContent(ctx, project.Integration.RepositoryId, commitId, publishDetailsPath) + if err != nil { + return nil, err + } + if data == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GitVersionPublishFileNotFound, + Message: exception.GitVersionPublishFileNotFoundMsg, + Params: map[string]interface{}{"projectId": project.Id, "branch": branchName}, + } + } + err = json.Unmarshal(data, &gitVersion) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GitVersionPublishFileInvalid, + Message: exception.GitVersionPublishFileInvalidMsg, + Params: map[string]interface{}{"projectId": project.Id, "branch": branchName}, + Debug: err.Error(), + } + } + + return &gitVersion, nil +} + +func 
(b *branchServiceImpl) deleteEmptyDrafts(ctx goctx.Context) { + drafts, err := b.branchRepository.GetBranchDrafts() + if err != nil { + log.Errorf("Failed to get drafts for all branches. Error: %v", err.Error()) + return + } + for _, draft := range drafts { + containsChanges, err := b.DraftContainsChanges(ctx, draft.ProjectId, draft.BranchName) + if err != nil { + log.Errorf("Failed to check if draft for project %v branch %v contains changes. Error: %v", draft.ProjectId, draft.BranchName, err.Error()) + continue + } + if containsChanges || b.wsBranchService.HasActiveEditSession(draft.ProjectId, draft.BranchName) { + continue + } + err = b.DeleteBranchDraft(draft.ProjectId, draft.BranchName) + if err != nil { + log.Errorf("Failed to delete draft for project %v branch %v. Error: %v", draft.ProjectId, draft.BranchName, err.Error()) + continue + } + log.Debugf("Successfully deleted draft for project %v branch %v", draft.ProjectId, draft.BranchName) + } +} + +func (b *branchServiceImpl) startCleanupJob(interval time.Duration) { + ctx := context.CreateContextWithSecurity(goctx.Background(), context.CreateSystemContext()) + ctx = context.CreateContextWithStacktrace(ctx, fmt.Sprintf("startCleanupJob(%d)", interval)) + + utils.SafeAsync(func() { + for { + time.Sleep(interval) + b.deleteEmptyDrafts(ctx) + } + }) +} + +func findFolderForFile(fileId string, allFiles []view.Content) string { + for _, file := range allFiles { + if file.IsFolder && strings.HasPrefix(fileId, file.FileId) { + return file.FileId + } + } + return "" +} + +func findAllFilesForFolder(folderFileId string, allFiles []view.Content) []string { + filesForFolder := make([]string, 0) + for _, file := range allFiles { + if !file.IsFolder && file.FromFolder && strings.HasPrefix(file.FileId, folderFileId) { + filesForFolder = append(filesForFolder, file.FileId) + } + } + return filesForFolder +} diff --git a/qubership-apihub-service/service/BuildCleanupService.go 
b/qubership-apihub-service/service/BuildCleanupService.go new file mode 100644 index 0000000..4914869 --- /dev/null +++ b/qubership-apihub-service/service/BuildCleanupService.go @@ -0,0 +1,255 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "net/http" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + mRepository "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/migration/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" + "github.com/robfig/cron/v3" + log "github.com/sirupsen/logrus" +) + +type DBCleanupService interface { + CreateCleanupJob(schedule string) error + StartMigrationBuildDataCleanup() (string, error) + GetMigrationBuildDataCleanupResult(id string) (interface{}, error) +} + +func NewDBCleanupService(cleanUpRepository repository.BuildCleanupRepository, + migrationRepository mRepository.MigrationRunRepository, + minioStorageService MinioStorageService, + infoService 
SystemInfoService) DBCleanupService { + return &dbCleanupServiceImpl{ + cleanUpRepository: cleanUpRepository, + migrationRepository: migrationRepository, + cron: cron.New(), + rmMigrationBuildDataRes: map[string]interface{}{}, + rmMigrationBuildDataResMutex: sync.RWMutex{}, + systemInfoService: infoService, + minioStorageService: minioStorageService, + } +} + +type dbCleanupServiceImpl struct { + cleanUpRepository repository.BuildCleanupRepository + migrationRepository mRepository.MigrationRunRepository + connectionProvider db.ConnectionProvider + cron *cron.Cron + rmMigrationBuildDataRes map[string]interface{} + rmMigrationBuildDataResMutex sync.RWMutex + minioStorageService MinioStorageService + systemInfoService SystemInfoService +} + +func (c *dbCleanupServiceImpl) CreateCleanupJob(schedule string) error { + job := BuildCleanupJob{ + schedule: schedule, + buildCleanupRepository: c.cleanUpRepository, + minioStorageService: c.minioStorageService, + systemInfoService: c.systemInfoService, + migrationRepository: c.migrationRepository, + } + + if len(c.cron.Entries()) == 0 { + location, err := time.LoadLocation("") + if err != nil { + return err + } + c.cron = cron.New(cron.WithLocation(location)) + c.cron.Start() + } + + _, err := c.cron.AddJob(schedule, &job) + if err != nil { + log.Warnf("[DBCleanupService] Job wasn't added for schedule - %s. 
With error - %s", schedule, err) + return err + } + log.Infof("[DBCleanupService] Job was created with schedule - %s", schedule) + + return nil +} + +type BuildCleanupJob struct { + schedule string + buildCleanupRepository repository.BuildCleanupRepository + minioStorageService MinioStorageService + systemInfoService SystemInfoService + migrationRepository mRepository.MigrationRunRepository +} + +func (j BuildCleanupJob) Run() { + scheduledAt := time.Now().Round(time.Second) + + migrations, err := j.migrationRepository.GetRunningMigrations() + if err != nil { + log.Error("Failed to check for running migrations for build cleanup job") + return + } + if migrations != nil && len(migrations) != 0 { + log.Infof("Cleanup was skipped at %s due to migration run", scheduledAt) + return + } + + var runCleanup bool + var lockId int + lastCleanup, err := j.buildCleanupRepository.GetLastCleanup() + if err != nil { + log.Errorf("Failed to get last cleanup: %v", err) + return + } + if lastCleanup != nil { + schedule, err := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow).Parse(j.schedule) + if err != nil { + log.Errorf("Failed to parse schedule for cleaning job: %v", err) + return + } + currentTime := time.Now().UTC() + nextRun := schedule.Next(currentTime) + interval := nextRun.Sub(currentTime) + runCleanup = !lastCleanup.ScheduledAt.After(currentTime.Add(-interval)) + lockId = lastCleanup.RunId + 1 + } else { + runCleanup = true + lockId = 1 + } + + if runCleanup { + log.Info("Cleanup job has started") + err = j.buildCleanupRepository.StoreCleanup(&entity.BuildCleanupEntity{ + RunId: lockId, + ScheduledAt: scheduledAt, + }) + if err != nil { + log.Errorf("Failed to store cleanup entity: %v", err) + return + } + if j.systemInfoService.IsMinioStorageActive() { + ctx := context.Background() + ids, err := j.buildCleanupRepository.GetRemoveCandidateOldBuildEntitiesIds() + if err != nil { + log.Errorf("Failed to get up remove candidate old build ids: %v", 
err)
+				return
+			}
+			err = j.minioStorageService.RemoveFiles(ctx, view.BUILD_RESULT_TABLE, ids)
+			if err != nil {
+				log.Errorf("Failed to remove old build results from minio storage: %v", err)
+				return
+			}
+
+			err = j.buildCleanupRepository.RemoveOldBuildSourcesByIds(ctx, ids, lockId, scheduledAt)
+			if err != nil {
+				log.Errorf("Failed to clean up old builds sources: %v", err)
+				return
+			}
+		} else {
+			err = j.buildCleanupRepository.RemoveOldBuildEntities(lockId, scheduledAt)
+			if err != nil {
+				log.Errorf("Failed to clean up old builds: %v", err)
+				return
+			}
+		}
+		//todo uncomment after improving performance of "delete" queries
+		// err = j.buildCleanupRepository.RemoveUnreferencedOperationData(lockId)
+		// if err != nil {
+		// 	log.Errorf("Failed to clean up unreferenced operation data: %v", err)
+		// 	return
+		// }
+
+		cleanupEnt, err := j.buildCleanupRepository.GetCleanup(lockId)
+		if err != nil {
+			log.Errorf("Failed to get cleanup run entity with id %d", lockId)
+			return
+		}
+		log.Infof("Cleanup was performed at %s with results: %v", scheduledAt, *cleanupEnt)
+	} else {
+		log.Infof("Cleanup was skipped at %s", scheduledAt)
+	}
+}
+
+// StartMigrationBuildDataCleanup launches an async removal of migration build
+// data and returns an id under which the outcome can be polled via
+// GetMigrationBuildDataCleanupResult.
+func (c *dbCleanupServiceImpl) StartMigrationBuildDataCleanup() (string, error) {
+	id := uuid.New().String()
+
+	result := map[string]interface{}{}
+	result["status"] = "running"
+
+	c.rmMigrationBuildDataResMutex.Lock()
+	c.rmMigrationBuildDataRes[id] = result
+	c.rmMigrationBuildDataResMutex.Unlock()
+
+	utils.SafeAsync(func() {
+		var err error
+		var removedRowsCount int
+		if c.systemInfoService.IsMinioStorageActive() {
+			ctx := context.Background()
+			// ids/err below are branch-local on purpose: the minio path
+			// stops at the first failure, reports and returns here.
+			ids, err := c.cleanUpRepository.GetRemoveMigrationBuildIds()
+			if err == nil {
+				err = c.minioStorageService.RemoveFiles(ctx, view.BUILD_RESULT_TABLE, ids)
+			}
+			if err == nil {
+				removedRowsCount, err = c.cleanUpRepository.RemoveMigrationBuildSourceData(ids)
+			}
+			// Report exactly once: error on first failure, success otherwise.
+			c.saveErrorInfo(result, err, id, removedRowsCount)
+			return
+		}
+		// Non-minio path: a single bulk delete in the database.
+		removedRowsCount, err = c.cleanUpRepository.RemoveMigrationBuildData()
+		c.saveErrorInfo(result, err, id, removedRowsCount)
+	})
+
+	return id, nil
+}
+
+// saveErrorInfo records the final status of an async cleanup run under the
+// result map's lock; err == nil means success.
+func (c *dbCleanupServiceImpl) saveErrorInfo(result map[string]interface{}, err error, id string, removedRowsCount int) {
+	c.rmMigrationBuildDataResMutex.Lock()
+	if err != nil {
+		log.Errorf("Failed to remove migration build data: %s", err)
+		result["status"] = "error"
+		result["error"] = err.Error()
+		c.rmMigrationBuildDataRes[id] = result
+	} else {
+		log.Infof("Removed %d migration build data rows", removedRowsCount)
+		result["status"] = "success"
+		result["removedRowsCount"] = removedRowsCount
+		c.rmMigrationBuildDataRes[id] = result
+	}
+	c.rmMigrationBuildDataResMutex.Unlock()
+}
+func (c *dbCleanupServiceImpl) GetMigrationBuildDataCleanupResult(id string) (interface{}, error) {
+	c.rmMigrationBuildDataResMutex.RLock()
+	defer c.rmMigrationBuildDataResMutex.RUnlock()
+	result, exists := c.rmMigrationBuildDataRes[id]
+	if !exists {
+		return 0, exception.CustomError{
+			Status:  http.StatusNotFound,
+			Code:    exception.UnableToGetMigrationDataCleanupResult,
+			Message: exception.UnableToGetMigrationDataCleanupResultMsg,
+		}
+	}
+	return result, nil
+}
diff --git a/qubership-apihub-service/service/BuildProcessor.go b/qubership-apihub-service/service/BuildProcessor.go
new file mode 100644
index 0000000..b1d6769
--- /dev/null
+++ b/qubership-apihub-service/service/BuildProcessor.go
@@ -0,0 +1,140 @@
+// Copyright 2024-2025 NetCracker Technology Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type BuildProcessorService interface { + GetFreeBuild(builderId string) (*view.BuildConfig, []byte, error) +} + +func NewBuildProcessorService(buildRepository repository.BuildRepository, refResolverService RefResolverService) BuildProcessorService { + bp := &buildProcessorServiceImpl{ + buildRepository: buildRepository, + + refResolverService: refResolverService, + } + + return bp +} + +type buildProcessorServiceImpl struct { + buildRepository repository.BuildRepository + + refResolverService RefResolverService +} + +func (b *buildProcessorServiceImpl) GetFreeBuild(builderId string) (*view.BuildConfig, []byte, error) { + buildSrc, err := b.findFreeBuild(builderId) // find not started build + if err != nil { + return nil, nil, err + } + if buildSrc == nil { + return nil, nil, nil + } + + config, err := view.BuildConfigFromMap(buildSrc.Config, buildSrc.BuildId) + if err != nil { + return nil, nil, fmt.Errorf("failed to parse build config: %s", err) + } + return config, buildSrc.Source, nil +} + +func (b *buildProcessorServiceImpl) findFreeBuild(builderId string) (*entity.BuildSourceEntity, error) { + var buildSrc *entity.BuildSourceEntity + var build 
*entity.BuildEntity + var err error + + for { + start := time.Now() + build, err = b.buildRepository.FindAndTakeFreeBuild(builderId) + utils.PerfLog(time.Since(start).Milliseconds(), 250, "findFreeBuild: FindAndTakeFreeBuild") + if err != nil { + return nil, err + } + if build == nil { + break + } + + start = time.Now() + src, err := b.buildRepository.GetBuildSrc(build.BuildId) + + if err != nil { + return nil, err + } + if src == nil { + utils.PerfLog(time.Since(start).Milliseconds(), 200, "findFreeBuild: GetBuildSrc") + start = time.Now() + err = b.buildRepository.UpdateBuildStatus(build.BuildId, view.StatusError, "BE error: sources not found during findFreeBuild") + utils.PerfLog(time.Since(start).Milliseconds(), 200, "findFreeBuild: UpdateBuildStatus") + if err != nil { + return nil, err + } + continue + } + + srcConfig, err := view.BuildConfigFromMap(src.Config, src.BuildId) + if err != nil { + err = b.buildRepository.UpdateBuildStatus(src.BuildId, view.StatusError, fmt.Sprintf("Build config has invalid format: %v", err.Error())) + if err != nil { + return nil, err + } + continue + } + utils.PerfLog(time.Since(start).Milliseconds(), 200, "findFreeBuild: GetBuildSrc") + + if srcConfig.UnresolvedRefs { + start = time.Now() + srcConfig.Refs, err = b.refResolverService.CalculateBuildConfigRefs(srcConfig.Refs, srcConfig.ResolveRefs, srcConfig.ResolveConflicts) + if err != nil { + err = b.buildRepository.UpdateBuildStatus(src.BuildId, view.StatusError, fmt.Sprintf("Build config has invalid refs: %v", err.Error())) + if err != nil { + return nil, err + } + continue + } + srcConfig.UnresolvedRefs = false + configAsMap, err := view.BuildConfigToMap(*srcConfig) + if err != nil { + err = b.buildRepository.UpdateBuildStatus(src.BuildId, view.StatusError, fmt.Sprintf("Failed to parse build src config as map: %v", err.Error())) + if err != nil { + return nil, err + } + continue + } + err = b.buildRepository.UpdateBuildSourceConfig(src.BuildId, *configAsMap) + if err != nil 
{ + err = b.buildRepository.UpdateBuildStatus(src.BuildId, view.StatusError, fmt.Sprintf("Failed to update build config: %v", err.Error())) + if err != nil { + return nil, err + } + continue + } + src.Config = *configAsMap + utils.PerfLog(time.Since(start).Milliseconds(), 200, "findFreeBuild: CalculateBuildConfigRefs") + } + buildSrc = src + break + } + return buildSrc, nil +} diff --git a/qubership-apihub-service/service/BuildResultService.go b/qubership-apihub-service/service/BuildResultService.go new file mode 100644 index 0000000..ee36795 --- /dev/null +++ b/qubership-apihub-service/service/BuildResultService.go @@ -0,0 +1,73 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "context" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type BuildResultService interface { + StoreBuildResult(buildId string, result []byte) error + GetBuildResult(buildId string) ([]byte, error) +} + +func NewBuildResultService(repo repository.BuildResultRepository, systemInfoService SystemInfoService, minioStorageService MinioStorageService) BuildResultService { + return &buildResultServiceImpl{ + repo: repo, + minioStorageService: minioStorageService, + systemInfoService: systemInfoService, + } +} + +type buildResultServiceImpl struct { + repo repository.BuildResultRepository + minioStorageService MinioStorageService + systemInfoService SystemInfoService +} + +func (b buildResultServiceImpl) StoreBuildResult(buildId string, result []byte) error { + if b.systemInfoService.IsMinioStorageActive() { + ctx := context.Background() + err := b.minioStorageService.UploadFile(ctx, view.BUILD_RESULT_TABLE, buildId, result) + if err != nil { + return err + } + return nil + } + return b.repo.StoreBuildResult(entity.BuildResultEntity{ + BuildId: buildId, + Data: result, + }) +} + +func (b buildResultServiceImpl) GetBuildResult(buildId string) ([]byte, error) { + if b.systemInfoService.IsMinioStorageActive() { + ctx := context.Background() + content, err := b.minioStorageService.GetFile(ctx, view.BUILD_RESULT_TABLE, buildId) + if err != nil { + return nil, err + } + return content, nil + } + res, err := b.repo.GetBuildResult(buildId) + if err != nil { + return nil, err + } + return res.Data, nil +} diff --git a/qubership-apihub-service/service/BuildService.go b/qubership-apihub-service/service/BuildService.go new file mode 100644 index 0000000..762636b --- /dev/null +++ b/qubership-apihub-service/service/BuildService.go @@ -0,0 +1,557 
@@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "archive/zip" + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/archive" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service/validation" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" +) + +type BuildService interface { + PublishVersion(ctx context.SecurityContext, config view.BuildConfig, src []byte, clientBuild bool, builderId string, dependencies []string, resolveRefs bool, resolveConflicts bool) (*view.PublishV2Response, error) + GetStatus(buildId string) (string, string, error) + GetStatuses(buildIds []string) ([]view.PublishStatusResponse, error) + UpdateBuildStatus(buildId string, status view.BuildStatusEnum, details string) error + GetFreeBuild(builderId string) 
([]byte, error) + CreateChangelogBuild(config view.BuildConfig, isExternal bool, builderId string) (string, error) //deprecated + GetBuildViewByChangelogSearchQuery(searchRequest view.ChangelogBuildSearchRequest) (*view.BuildView, error) + GetBuildViewByDocumentGroupSearchQuery(searchRequest view.DocumentGroupBuildSearchRequest) (*view.BuildView, error) + ValidateBuildOwnership(buildId string, builderId string) error + + CreateBuildWithoutDependencies(config view.BuildConfig, isExternal bool, builderId string) (string, error) + AwaitBuildCompletion(buildId string) error +} + +func NewBuildService( + buildRepository repository.BuildRepository, + buildProcessor BuildProcessorService, + publishService PublishedService, + systemInfoService SystemInfoService, + packageService PackageService, + refResolverService RefResolverService) BuildService { + return &buildServiceImpl{ + buildRepository: buildRepository, + buildProcessor: buildProcessor, + publishService: publishService, + systemInfoService: systemInfoService, + packageService: packageService, + refResolverService: refResolverService, + } +} + +type buildServiceImpl struct { + buildRepository repository.BuildRepository + buildProcessor BuildProcessorService + publishService PublishedService + systemInfoService SystemInfoService + packageService PackageService + refResolverService RefResolverService +} + +func (b *buildServiceImpl) PublishVersion(ctx context.SecurityContext, config view.BuildConfig, src []byte, clientBuild bool, builderId string, dependencies []string, resolveRefs bool, resolveConflicts bool) (*view.PublishV2Response, error) { + exists, err := b.packageService.PackageExists(config.PackageId) + if err != nil { + return nil, err + } + if !exists { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": config.PackageId}, + } + } + + versionNameValidationError := 
ValidateVersionName(config.Version) + if versionNameValidationError != nil { + return nil, versionNameValidationError + } + + if config.MigrationBuild == true || config.NoChangelog == true || !config.PublishedAt.IsZero() { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ForbiddenDefaultMigrationBuildParameters, + Message: exception.ForbiddenDefaultMigrationBuildParametersMsg, + Params: map[string]interface{}{"parameters": "migrationBuild,noChangeLog,publishedAt"}, + } + } + + if config.BuildType != view.ChangelogType { + if strings.Contains(config.PreviousVersion, "@") { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PreviousVersionNameNotAllowed, + Message: exception.PreviousVersionNameNotAllowedMsg, + Params: map[string]interface{}{"version": config.PreviousVersion}, + } + } + } + + if config.Status == string(view.Release) { + packEnt, err := b.packageService.GetPackage(ctx, config.PackageId, false) + if err != nil { + return nil, err + } + var pattern string + if packEnt.ReleaseVersionPattern != "" { + pattern = packEnt.ReleaseVersionPattern + } else { + pattern = ".*" + } + err = ReleaseVersionMatchesPattern(config.Version, pattern) + if err != nil { + return nil, err + } + } + + if config.PreviousVersionPackageId == config.PackageId { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPreviousVersionPackage, + Message: exception.InvalidPreviousVersionPackageMsg, + Params: map[string]interface{}{"previousVersionPackageId": config.PreviousVersionPackageId, "packageId": config.PackageId}, + } + } + + if config.Version == config.PreviousVersion { + if config.PreviousVersionPackageId == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.VersionIsEqualToPreviousVersion, + Message: exception.VersionIsEqualToPreviousVersionMsg, + Params: map[string]interface{}{"version": config.Version, 
"previousVersion": config.PreviousVersion}, + } + } + } + + if config.PreviousVersion != "" { + previousVersionPackageId := config.PackageId + if config.PreviousVersionPackageId != "" { + previousVersionPackageId = config.PreviousVersionPackageId + } + previousVersionExists, err := b.publishService.VersionPublished(previousVersionPackageId, config.PreviousVersion) + if err != nil { + return nil, err + } + if !previousVersionExists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": config.PreviousVersion, "packageId": previousVersionPackageId}, + } + } + } + + if len(src) > 0 { + zipReader, err := zip.NewReader(bytes.NewReader(src), int64(len(src))) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchive, + Message: exception.InvalidPackageArchiveMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + if err = validation.ValidatePublishSources(archive.NewSourcesArchive(zipReader, &config)); err != nil { + return nil, err + } + } + + if config.Metadata.RepositoryUrl != "" && !utils.IsUrl(config.Metadata.RepositoryUrl) { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectMetadataField, + Message: exception.IncorrectMetadataFieldMsg, + Params: map[string]interface{}{"field": "repositoryUrl", "description": "not valid URL"}, + } + } + + //defer refs calculation if build has dependencies + if len(dependencies) > 0 { + config.UnresolvedRefs = true + config.ResolveConflicts = resolveConflicts + config.ResolveRefs = resolveRefs + } else { + config.Refs, err = b.refResolverService.CalculateBuildConfigRefs(config.Refs, resolveRefs, resolveConflicts) + if err != nil { + return nil, err + } + } + + publishId, err := b.addBuild(ctx, config, src, clientBuild, builderId, 
dependencies) + if err != nil { + return nil, err + } + + if clientBuild && len(dependencies) == 0 { + return &view.PublishV2Response{PublishId: publishId, Config: &config}, nil + } else { + return &view.PublishV2Response{PublishId: publishId}, nil + } +} + +// CreateChangelogBuild deprecated. use to CreateBuildWithoutDependencies +func (b *buildServiceImpl) CreateChangelogBuild(config view.BuildConfig, isExternal bool, builderId string) (string, error) { + status := view.StatusNotStarted + if isExternal { + status = view.StatusRunning + } + buildId := config.PublishId + if buildId == "" { + buildId = uuid.New().String() + } + + buildEnt := entity.BuildEntity{ + BuildId: buildId, + Status: string(status), + Details: "", + + PackageId: config.PackageId, + Version: config.Version, + + CreatedBy: config.CreatedBy, + RestartCount: 0, + + BuilderId: builderId, + Priority: 0, + } + + confAsMap, err := view.BuildConfigToMap(config) + if err != nil { + return "", err + } + + sourceEnt := entity.BuildSourceEntity{ + BuildId: buildEnt.BuildId, + Config: *confAsMap, + } + + err = b.buildRepository.StoreBuild(buildEnt, sourceEnt, nil) + if err != nil { + return "", err + } + return buildEnt.BuildId, nil +} + +func (b *buildServiceImpl) CreateBuildWithoutDependencies(config view.BuildConfig, clientBuild bool, builderId string) (string, error) { + status := view.StatusNotStarted + if clientBuild { + status = view.StatusRunning + } + buildId := config.PublishId + if buildId == "" { + buildId = uuid.New().String() + } + + timeNow := time.Now() + buildEnt := entity.BuildEntity{ + BuildId: buildId, + Status: string(status), + Details: "", + ClientBuild: clientBuild, + + PackageId: config.PackageId, + Version: config.Version, + + StartedAt: &timeNow, + + CreatedBy: config.CreatedBy, + RestartCount: 0, + + BuilderId: builderId, + Priority: 0, + } + + confAsMap, err := view.BuildConfigToMap(config) + if err != nil { + return "", err + } + + sourceEnt := entity.BuildSourceEntity{ + 
BuildId: buildEnt.BuildId, + Config: *confAsMap, + } + + err = b.buildRepository.StoreBuild(buildEnt, sourceEnt, nil) + if err != nil { + return "", err + } + return buildEnt.BuildId, nil +} + +// AddBuild this is intended for build only, shouldn't be called if build is not required in scope of publish +func (b *buildServiceImpl) addBuild(ctx context.SecurityContext, config view.BuildConfig, src []byte, clientBuild bool, builderId string, dependencies []string) (string, error) { + status := view.StatusNotStarted + if clientBuild { + status = view.StatusRunning + } + + buildId := config.PublishId + if buildId == "" { + buildId = uuid.New().String() + } + + timeNow := time.Now() + buildEnt := entity.BuildEntity{ + BuildId: buildId, + Status: string(status), + Details: "", + ClientBuild: clientBuild, + + PackageId: config.PackageId, + Version: config.Version, + + StartedAt: &timeNow, + + CreatedBy: ctx.GetUserId(), + RestartCount: 0, + + BuilderId: builderId, + Priority: 1, + } + + confAsMap, err := view.BuildConfigToMap(config) + if err != nil { + return "", err + } + + sourceEnt := entity.BuildSourceEntity{ + BuildId: buildEnt.BuildId, + Source: src, + Config: *confAsMap, + } + + var depends []entity.BuildDependencyEntity + for _, dep := range dependencies { + depends = append(depends, entity.BuildDependencyEntity{BuildId: buildEnt.BuildId, DependId: dep}) + } + + err = b.buildRepository.StoreBuild(buildEnt, sourceEnt, depends) + if err != nil { + return "", err + } + + if !clientBuild { + log.Infof("Build %s added as internal", buildEnt.BuildId) + } else { + log.Infof("Build %s added as external", buildEnt.BuildId) + } + + return buildEnt.BuildId, nil +} + +func (b *buildServiceImpl) GetStatus(buildId string) (string, string, error) { + ent, err := b.buildRepository.GetBuild(buildId) + if err != nil { + return "", "", err + } + if ent == nil { + return "", "", nil + } + return ent.Status, ent.Details, nil +} + +func (b *buildServiceImpl) GetStatuses(buildIds 
[]string) ([]view.PublishStatusResponse, error) { + ents, err := b.buildRepository.GetBuilds(buildIds) + if err != nil { + return nil, err + } + var result []view.PublishStatusResponse + for _, ent := range ents { + result = append(result, view.PublishStatusResponse{ + PublishId: ent.BuildId, + Status: ent.Status, + Message: ent.Details, + }) + } + return result, nil +} + +func (b *buildServiceImpl) UpdateBuildStatus(buildId string, status view.BuildStatusEnum, details string) error { + err := b.buildRepository.UpdateBuildStatus(buildId, status, details) + if err != nil { + return err + } + + return nil +} + +func (b *buildServiceImpl) GetFreeBuild(builderId string) ([]byte, error) { + config, src, err := b.buildProcessor.GetFreeBuild(builderId) + if err != nil { + return nil, err + } + if config == nil && src == nil { + return nil, nil + } + result := bytes.Buffer{} + zw := zip.NewWriter(&result) + if src != nil { + srcZipReader, err := zip.NewReader(bytes.NewReader(src), int64(len(src))) + if err != nil { + return nil, err + } + for _, srcFile := range srcZipReader.File { + srcFileReader, err := srcFile.Open() + if err != nil { + return nil, err + } + header, err := zip.FileInfoHeader(srcFile.FileInfo()) + if err != nil { + return nil, err + } + header.Name = "sources/" + srcFile.Name + newHeader, err := zw.CreateHeader(header) + if err != nil { + return nil, err + } + _, err = io.Copy(newHeader, srcFileReader) + if err != nil { + return nil, err + } + srcFileReader.Close() + } + } + fw, err := zw.Create("config.json") + if err != nil { + return nil, err + } + configBytes, err := json.Marshal(config) + if err != nil { + return nil, err + } + _, err = fw.Write(configBytes) + if err != nil { + return nil, err + } + zw.Close() + return result.Bytes(), nil +} + +func (b *buildServiceImpl) GetBuildViewByChangelogSearchQuery(searchRequest view.ChangelogBuildSearchRequest) (*view.BuildView, error) { + searchQuery := entity.ChangelogBuildSearchQueryEntity{ + PackageId: 
searchRequest.PackageId, + Version: searchRequest.Version, + PreviousVersionPackageId: searchRequest.PreviousVersionPackageId, + PreviousVersion: searchRequest.PreviousVersion, + BuildType: searchRequest.BuildType, + ComparisonRevision: searchRequest.ComparisonRevision, + ComparisonPrevRevision: searchRequest.ComparisonPrevRevision, + } + + buildEnt, err := b.buildRepository.GetBuildByChangelogSearchQuery(searchQuery) + if err != nil { + return nil, err + } + if buildEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BuildNotFoundByQuery, + Message: exception.BuildNotFoundByQueryMsg, + Params: map[string]interface{}{"query": searchRequest}, + } + } + result := entity.MakeBuildView(buildEnt) + return result, nil +} + +func (b *buildServiceImpl) GetBuildViewByDocumentGroupSearchQuery(searchRequest view.DocumentGroupBuildSearchRequest) (*view.BuildView, error) { + searchQuery := entity.DocumentGroupBuildSearchQueryEntity{ + PackageId: searchRequest.PackageId, + Version: searchRequest.Version, + BuildType: searchRequest.BuildType, + Format: searchRequest.Format, + ApiType: searchRequest.ApiType, + GroupName: searchRequest.GroupName, + } + + buildEnt, err := b.buildRepository.GetBuildByDocumentGroupSearchQuery(searchQuery) + if err != nil { + return nil, err + } + if buildEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BuildNotFoundByQuery, + Message: exception.BuildNotFoundByQueryMsg, + Params: map[string]interface{}{"query": searchRequest}, + } + } + result := entity.MakeBuildView(buildEnt) + return result, nil +} + +func (b *buildServiceImpl) ValidateBuildOwnership(buildId string, builderId string) error { + buildEnt, err := b.buildRepository.GetBuild(buildId) + if err != nil { + return err + } + if buildEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BuildNotFoundById, + Message: exception.BuildNotFoundByIdMsg, + Params: 
map[string]interface{}{"id": buildId}, + } + } + + if buildEnt.BuilderId != builderId { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.BuildNotOwned, + Message: exception.BuildNotOwnedMsg, + Params: map[string]interface{}{"buildId": buildId}, + } + } + return nil +} + +func (b *buildServiceImpl) AwaitBuildCompletion(buildId string) error { + start := time.Now() + for { + build, err := b.buildRepository.GetBuild(buildId) + if err != nil { + return fmt.Errorf("failed to get build status: %v", err.Error()) + } + if build.Status == string(view.StatusError) { + return fmt.Errorf("build failed with error: %v", build.Details) + } + if build.Status == string(view.StatusComplete) { + return nil + } + if time.Since(start) > time.Minute*10 { + return fmt.Errorf("deadline exceeded") + } + time.Sleep(time.Second * 5) + } +} diff --git a/qubership-apihub-service/service/BusinessMetricService.go b/qubership-apihub-service/service/BusinessMetricService.go new file mode 100644 index 0000000..6b71468 --- /dev/null +++ b/qubership-apihub-service/service/BusinessMetricService.go @@ -0,0 +1,48 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type BusinessMetricService interface { + GetBusinessMetrics(parentPackageId string, hierarchyLevel int) ([]view.BusinessMetric, error) +} + +func NewBusinessMetricService(businessMetricRepo repository.BusinessMetricRepository) BusinessMetricService { + + return businessMetricServiceImpl{ + businessMetricRepo: businessMetricRepo, + } +} + +type businessMetricServiceImpl struct { + businessMetricRepo repository.BusinessMetricRepository +} + +func (b businessMetricServiceImpl) GetBusinessMetrics(parentPackageId string, hierarchyLevel int) ([]view.BusinessMetric, error) { + businessMetricEnts, err := b.businessMetricRepo.GetBusinessMetrics(parentPackageId, hierarchyLevel) + if err != nil { + return nil, err + } + businessMetrics := make([]view.BusinessMetric, 0) + for _, businessMetric := range businessMetricEnts { + businessMetrics = append(businessMetrics, entity.MakeBusinessMetricView(businessMetric)) + } + return businessMetrics, nil +} diff --git a/qubership-apihub-service/service/CleanupService.go b/qubership-apihub-service/service/CleanupService.go new file mode 100644 index 0000000..f1fd8fb --- /dev/null +++ b/qubership-apihub-service/service/CleanupService.go @@ -0,0 +1,133 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" +) + +type CleanupService interface { + ClearTestData(testId string) error +} + +func NewCleanupService(cp db.ConnectionProvider) CleanupService { + return &cleanupServiceImpl{cp: cp} +} + +type cleanupServiceImpl struct { + cp db.ConnectionProvider +} + +func (c cleanupServiceImpl) ClearTestData(testId string) error { + idFilter := "QS%-" + utils.LikeEscaped(testId) + "%" + //clear tables: project, branch_draft_content, branch_draft_references, favorites, apihub_api_keys + _, err := c.cp.GetConnection().Model(&entity.ProjectIntEntity{}). + Where("id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + //clear tables: package_group + _, err = c.cp.GetConnection().Model(&entity.PackageEntity{}). + Where("id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + //clear tables: published_version, published_version_references, published_version_revision_content, published_sources + _, err = c.cp.GetConnection().Model(&entity.PublishedVersionEntity{}). + Where("package_id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + //clear table: published_sources + _, err = c.cp.GetConnection().Model(&entity.PublishedSrcEntity{}). + Where("package_id like ?", idFilter). 
+ ForceDelete() + if err != nil { + return err + } + //clear table: published_sources_archives + _, err = c.cp.GetConnection().Exec(`delete from published_sources_archives where checksum not in (select distinct archive_checksum from published_sources)`) + if err != nil { + return err + } + //clear table published_data + _, err = c.cp.GetConnection().Model(&entity.PublishedContentDataEntity{}). + Where("package_id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + //clear table shared_url_info + _, err = c.cp.GetConnection().Model(&entity.SharedUrlInfoEntity{}). + Where("package_id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + //clear table package_member_role + _, err = c.cp.GetConnection().Model(&entity.PackageMemberRoleEntity{}). + Where("user_id ilike ?", "%"+utils.LikeEscaped(testId)+"%"). + ForceDelete() + if err != nil { + return err + } + //clear table user_data + _, err = c.cp.GetConnection().Model(&entity.UserEntity{}). + Where("user_id ilike ?", "%"+utils.LikeEscaped(testId)+"%"). + ForceDelete() + if err != nil { + return err + } + //clear open_count tables + _, err = c.cp.GetConnection().Exec(`delete from published_version_open_count where package_id ilike ?`, idFilter) + if err != nil { + return err + } + _, err = c.cp.GetConnection().Exec(`delete from published_document_open_count where package_id ilike ?`, idFilter) + if err != nil { + return err + } + _, err = c.cp.GetConnection().Exec(`delete from operation_open_count where package_id ilike ?`, idFilter) + if err != nil { + return err + } + //clear table version_comparison + _, err = c.cp.GetConnection().Model(&entity.VersionComparisonEntity{}). + Where("(package_id like ? or previous_package_id like ?)", idFilter, idFilter). + ForceDelete() + if err != nil { + return err + } + //clear table operation_comparison + _, err = c.cp.GetConnection().Model(&entity.OperationComparisonEntity{}). + Where("(package_id like ? 
or previous_package_id like ?)", idFilter, idFilter). + ForceDelete() + if err != nil { + return err + } + //clear table apihub_api_keys + _, err = c.cp.GetConnection().Model(&entity.ApihubApiKeyEntity{}). + Where("package_id like ?", idFilter). + ForceDelete() + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/service/CommitService.go b/qubership-apihub-service/service/CommitService.go new file mode 100644 index 0000000..6bc607a --- /dev/null +++ b/qubership-apihub-service/service/CommitService.go @@ -0,0 +1,405 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + goctx "context" + "fmt" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type CommitService interface { + CommitBranchDraftChanges(ctx context.SecurityContext, projectId string, branchName string, newBranchName string, comment string, createMergeRequest bool) error +} + +const ( + actionAdd int = 1 << 0 + actionDelete int = 1 << 1 + actionModify int = 1 << 2 + movedFrom int = 1 << 3 + movedTo int = 1 << 4 + existsInGit int = 1 << 5 + + gitActionCreate string = "git_create" + gitActionUpdate string = "git_update" + gitActionDelete string = "git_delete" + gitActionNone string = "git_no_action" + gitActionUnsupported string = "git_unsupported_lifecycle" +) + +func NewCommitService(draftRepository repository.DraftRepository, + contentService DraftContentService, + branchService BranchService, + projectService ProjectService, + gitClientProvider GitClientProvider, + websocketService WsBranchService, + wsFileEditService WsFileEditService, + branchEditorsService BranchEditorsService) CommitService { + return &commitServiceImpl{ + draftRepository: draftRepository, + contentService: contentService, + branchService: branchService, + projectService: projectService, + gitClientProvider: gitClientProvider, + wsBranchService: websocketService, + wsFileEditService: wsFileEditService, + branchEditorsService: branchEditorsService, + } +} + +type commitServiceImpl struct { + draftRepository 
repository.DraftRepository + contentService DraftContentService + branchService BranchService + projectService ProjectService + gitClientProvider GitClientProvider + wsBranchService WsBranchService + wsFileEditService WsFileEditService + branchEditorsService BranchEditorsService +} + +func (c *commitServiceImpl) CommitBranchDraftChanges(ctx context.SecurityContext, projectId string, branchName string, newBranchName string, comment string, createMergeRequest bool) error { + comment = "[APIHUB] " + comment + branchForCommit := branchName + if newBranchName != "" { + newBranchExists, _, err := c.branchService.BranchExists(context.CreateContextWithSecurity(goctx.Background(), ctx), projectId, newBranchName) + if err != nil { + return err + } + if newBranchExists { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BranchAlreadyExists, + Message: exception.BranchAlreadyExistsMsg, + Params: map[string]interface{}{"branch": newBranchName, "projectId": projectId}, + } + } + branchForCommit = newBranchName + } + + err := c.CommitToSpecificBranch(ctx, projectId, branchName, branchForCommit, comment) + if err != nil { + return err + } + var mrLink *string + defer utils.SafeAsync(func() { + c.wsBranchService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchSavedPatch{ + Type: websocket.BranchSavedType, + UserId: ctx.GetUserId(), + Comment: comment, + Branch: newBranchName, + MergeRequestURL: mrLink, + }) + }) + if branchForCommit != branchName && createMergeRequest { + mergeRequestTitle := fmt.Sprintf("[APIHUB] Resolve commit conflict via %v", branchForCommit) + + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("CommitBranchDraftChanges(%s,%s,%s,%s,%t)", projectId, branchName, newBranchName, comment, createMergeRequest)) + + mergeRequestUrl, err := c.branchService.CreateMergeRequest(goCtx, projectId, 
branchForCommit, branchName, mergeRequestTitle, comment) + if err != nil { + return err + } + mrLink = &mergeRequestUrl + } + + return nil +} + +func (c *commitServiceImpl) CommitToSpecificBranch(ctx context.SecurityContext, projectId string, branchName string, branchForCommit, comment string) error { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("CommitToSpecificBranch(%s,%s,%s,%s)", projectId, branchName, branchForCommit, comment)) + + branchExists, canPush, err := c.branchService.BranchExists(goCtx, projectId, branchName) + if err != nil { + return err + } + if !branchExists { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.BranchNotFound, + Message: exception.BranchNotFoundMsg, + Params: map[string]interface{}{"branch": branchName, "projectId": projectId}, + } + } + if !canPush { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientRightsToCommit, + Message: exception.InsufficientRightsToCommitMsg, + Params: map[string]interface{}{"branch": branchName}, + } + } + project, err := c.projectService.GetProject(ctx, projectId) + if err != nil { + return err + } + gitClient, err := c.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return fmt.Errorf("failed to get git client: %v", err) + } + + builder := client.NewActionBuilder() + + draftExists, err := c.branchService.DraftExists(projectId, branchName) + if err != nil { + return err + } + if !draftExists { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.BranchDraftNotFound, + Message: exception.BranchDraftNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId, "branch": branchName}, + } + } + branchDetails, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false) + if err != nil { + 
return err + } + //remove files and refs with "status" = "excluded" from branch config + branchDetails = removeExcluded(branchDetails) + + finalFileStates, err := getFilesLifecycle(branchDetails.Files, gitClient, project.Integration.RepositoryId, branchName) + if err != nil { + return err + } + + for id, lifecycle := range finalFileStates { + switch findFinalAction(lifecycle) { + case gitActionUpdate: + err = c.wsFileEditService.HandleCommitAction(projectId, branchName, id) + if err != nil { + return err + } + cwd, err := c.contentService.GetContentFromDraftOrGit(ctx, projectId, branchName, id) + if err != nil { + return err + } + builder = builder.Update(id, cwd.Data) + case gitActionCreate: + err = c.wsFileEditService.HandleCommitAction(projectId, branchName, id) + if err != nil { + return err + } + cwd, err := c.contentService.GetContentFromDraftOrGit(ctx, projectId, branchName, id) + if err != nil { + return err + } + builder = builder.Create(id, cwd.Data) + case gitActionDelete: + //err = c.wsFileEditService.HandleCommitAction(ctx, projectId, branchName, id) + //if err != nil { + // return err + //} + builder = builder.Delete(id, []byte{}) + case gitActionUnsupported: + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnsupportedActionWithFile, + Message: exception.UnsupportedActionWithFileMsg, + Params: map[string]interface{}{"code": lifecycle, "fileId": id}, + } + } + } + + apiHubConfigPath := getApihubConfigFileId(projectId) + + configExists, err := gitClient.FileExists(goCtx, project.Integration.RepositoryId, branchName, apiHubConfigPath) + if err != nil { + return err + } + + //remove files with "status" = "deleted" from branch config + branchDetails = removeDeletedFiles(branchDetails) + draftJson, err := getApihubConfigRaw(view.TransformBranchToGitView(*branchDetails)) + if err != nil { + return err + } + if !configExists { + builder = builder.Create(apiHubConfigPath, draftJson) + } else { + builder = 
builder.Update(apiHubConfigPath, draftJson) + } + + err = gitClient.CommitChanges(goCtx, project.Integration.RepositoryId, branchName, branchForCommit, comment, builder.Build()) + if err != nil { + return err + } + err = c.branchService.ResetBranchDraft(goCtx, projectId, branchName, false) + if err != nil { + return err + } + + return nil +} + +func removeExcluded(branch *view.Branch) *view.Branch { + resFiles := make([]view.Content, 0) + resRefs := make([]view.Ref, 0) + + for _, f := range branch.Files { + if f.Status != view.StatusExcluded { + resFiles = append(resFiles, f) + } + } + + for _, r := range branch.Refs { + if r.Status != view.StatusDeleted { + resRefs = append(resRefs, r) + } + } + branch.Files = resFiles + branch.Refs = resRefs + return branch +} + +func removeDeletedFiles(branch *view.Branch) *view.Branch { + resFiles := make([]view.Content, 0) + + for _, f := range branch.Files { + if f.Status != view.StatusDeleted { + resFiles = append(resFiles, f) + } + } + branch.Files = resFiles + return branch +} + +func getFilesLifecycle(files []view.Content, gitClient client.GitClient, repId string, branchName string) (map[string]int, error) { + fileHistory := map[string]int{} + for _, file := range files { + if file.Status == view.StatusUnmodified || file.IsFolder { + continue + } + initValues(fileHistory, file.FileId) + + // TODO: should be context from the request + goCtx := context.CreateContextWithStacktrace(goctx.Background(), fmt.Sprintf("getFilesLifecycle(%s,%s)", repId, branchName)) + + gitFileExists, err := gitClient.FileExists(goCtx, repId, branchName, file.FileId) + if err != nil { + return nil, err + } + + if gitFileExists { + fileHistory[file.FileId] = fileHistory[file.FileId] | existsInGit + } + + switch file.Status { + case view.StatusAdded: + { + fileHistory[file.FileId] = fileHistory[file.FileId] | actionAdd + } + case view.StatusDeleted: + { + if file.LastStatus == "" { + continue + } + + fileHistory[file.FileId] = 
fileHistory[file.FileId] | actionDelete + + gitFileExists, err = gitClient.FileExists(goCtx, repId, branchName, file.FileId) + if err != nil { + return nil, err + } + if gitFileExists { + fileHistory[file.FileId] = fileHistory[file.FileId] | existsInGit + } + } + case view.StatusModified, view.StatusMoved: + { + + if file.MovedFrom != "" { + initValues(fileHistory, file.MovedFrom) + fileHistory[file.FileId] = fileHistory[file.FileId] | movedTo + fileHistory[file.MovedFrom] = fileHistory[file.MovedFrom] | movedFrom + + gitFileExists, err = gitClient.FileExists(goCtx, repId, branchName, file.MovedFrom) + if err != nil { + return nil, err + } + if gitFileExists { + fileHistory[file.MovedFrom] = fileHistory[file.MovedFrom] | existsInGit + } + } else { + fileHistory[file.FileId] = fileHistory[file.FileId] | actionModify + } + } + } + } + return fileHistory, nil +} + +func initValues(mp map[string]int, key string) { + _, exists := mp[key] + if !exists { + mp[key] = 0 + } +} + +func findFinalAction(actions int) string { + switch actions { + case actionAdd | movedFrom | existsInGit: + return gitActionUpdate + case actionAdd | movedFrom: + return gitActionCreate + case actionAdd | existsInGit: + return gitActionUpdate + case actionAdd: + return gitActionCreate + case actionModify | movedFrom | existsInGit: + return gitActionUpdate + case actionModify | movedFrom: + return gitActionCreate + case actionModify | existsInGit: + return gitActionUpdate + case actionModify: + return gitActionCreate + case movedFrom | movedTo | existsInGit: + return gitActionUpdate + case movedFrom | movedTo: + return gitActionCreate + case movedFrom | existsInGit: + return gitActionDelete + case movedFrom: + return gitActionNone + case movedTo | existsInGit: + return gitActionUpdate + case movedTo: + return gitActionCreate + case actionDelete | existsInGit: + return gitActionDelete + case actionDelete: + return gitActionNone + case existsInGit: + return gitActionNone + case 0: + return 
gitActionNone + default: + return gitActionUnsupported + } +} diff --git a/qubership-apihub-service/service/ComparisonService.go b/qubership-apihub-service/service/ComparisonService.go new file mode 100644 index 0000000..2373761 --- /dev/null +++ b/qubership-apihub-service/service/ComparisonService.go @@ -0,0 +1,220 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ComparisonService interface { + ValidComparisonResultExists(packageId string, version string, previousVersionPackageId string, previousVersion string) (bool, error) + GetComparisonResult(packageId string, version string, previousVersionPackageId string, previousVersion string) (*view.VersionComparisonSummary, error) +} + +func NewComparisonService(publishedRepo repository.PublishedRepository, operationRepo repository.OperationRepository, packageVersionEnrichmentService PackageVersionEnrichmentService) ComparisonService { + return &comparisonServiceImpl{ + publishedRepo: publishedRepo, + operationRepo: operationRepo, + packageVersionEnrichmentService: 
packageVersionEnrichmentService, + } +} + +type comparisonServiceImpl struct { + publishedRepo repository.PublishedRepository + operationRepo repository.OperationRepository + packageVersionEnrichmentService PackageVersionEnrichmentService +} + +func (c comparisonServiceImpl) GetComparisonResult(packageId string, version string, previousVersionPackageId string, previousVersion string) (*view.VersionComparisonSummary, error) { + packageEnt, err := c.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + versionEnt, err := c.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + if previousVersion == "" || previousVersionPackageId == "" { + if versionEnt.PreviousVersion == "" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoPreviousVersion, + Message: exception.NoPreviousVersionMsg, + Params: map[string]interface{}{"version": version}, + } + } + previousVersion = versionEnt.PreviousVersion + if versionEnt.PreviousVersionPackageId != "" { + previousVersionPackageId = versionEnt.PreviousVersionPackageId + } else { + previousVersionPackageId = packageId + } + } + previousVersionEnt, err := c.publishedRepo.GetVersion(previousVersionPackageId, previousVersion) + if err != nil { + return nil, err + } + if previousVersionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: 
exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": previousVersion, "packageId": previousVersionPackageId}, + } + } + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision, + ) + comparisonEnt, err := c.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return nil, err + } + if comparisonEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ComparisonNotFound, + Message: exception.ComparisonNotFoundMsg, + Params: map[string]interface{}{ + "comparisonId": comparisonId, + "packageId": versionEnt.PackageId, + "version": versionEnt.Version, + "revision": versionEnt.Revision, + "previousPackageId": previousVersionEnt.PackageId, + "previousVersion": previousVersionEnt.Version, + "previousRevision": previousVersionEnt.Revision, + }, + } + } + result := new(view.VersionComparisonSummary) + + if packageEnt.Kind == entity.KIND_PACKAGE { + result.NoContent = comparisonEnt.NoContent + result.OperationTypes = &comparisonEnt.OperationTypes + } + if packageEnt.Kind == entity.KIND_DASHBOARD { + refsComparisonEnts, err := c.publishedRepo.GetVersionRefsComparisons(comparisonId) + if err != nil { + return nil, err + } + refComparisons := make([]view.RefComparison, 0) + packageVersions := make(map[string][]string, 0) + for _, refEnt := range refsComparisonEnts { + refView := entity.MakeRefComparisonView(refEnt) + if refView.PackageRef != "" { + packageVersions[refEnt.PackageId] = append(packageVersions[refEnt.PackageId], view.MakeVersionRefKey(refEnt.Version, refEnt.Revision)) + } + if refView.PreviousPackageRef != "" { + packageVersions[refEnt.PreviousPackageId] = append(packageVersions[refEnt.PreviousPackageId], view.MakeVersionRefKey(refEnt.PreviousVersion, refEnt.PreviousRevision)) + } + refComparisons = append(refComparisons, *refView) 
+ } + packagesRefs, err := c.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + if err != nil { + return nil, err + } + result.Refs = &refComparisons + result.Packages = &packagesRefs + } + + return result, nil +} + +func (c comparisonServiceImpl) ValidComparisonResultExists(packageId string, version string, previousVersionPackageId string, previousVersion string) (bool, error) { + versionEnt, err := c.publishedRepo.GetVersion(packageId, version) + if err != nil { + return false, err + } + if versionEnt == nil { + return false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + if previousVersion == "" || previousVersionPackageId == "" { + if versionEnt.PreviousVersion == "" { + return false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoPreviousVersion, + Message: exception.NoPreviousVersionMsg, + Params: map[string]interface{}{"version": version}, + } + } + previousVersion = versionEnt.PreviousVersion + if versionEnt.PreviousVersionPackageId != "" { + previousVersionPackageId = versionEnt.PreviousVersionPackageId + } else { + previousVersionPackageId = packageId + } + } + previousVersionEnt, err := c.publishedRepo.GetVersion(previousVersionPackageId, previousVersion) + if err != nil { + return false, err + } + if previousVersionEnt == nil { + return false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": previousVersion, "packageId": previousVersionPackageId}, + } + } + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, 
previousVersionEnt.Revision, + ) + comparisonEnt, err := c.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return false, err + } + if comparisonEnt == nil || comparisonEnt.NoContent { + return false, nil + } + if len(comparisonEnt.Refs) != 0 { + comparisonRefs, err := c.publishedRepo.GetVersionRefsComparisons(comparisonId) + if err != nil { + return false, err + } + for _, comparison := range comparisonRefs { + if comparison.NoContent { + return false, nil + } + } + } + return true, nil +} diff --git a/qubership-apihub-service/service/ContentUtils.go b/qubership-apihub-service/service/ContentUtils.go new file mode 100644 index 0000000..ff58bc0 --- /dev/null +++ b/qubership-apihub-service/service/ContentUtils.go @@ -0,0 +1,417 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "bufio" + "bytes" + goctx "context" + "encoding/json" + "fmt" + "net/http" + "path/filepath" + "regexp" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +func getApihubConfigFileId(projectId string) string { + return ApiHubBaseConfigPath + projectId + ".json" +} + +func getApihubVersionPublishFileId(projectId string) string { + return ApiHubBaseConfigPath + projectId + "_version_publish.json" +} + +func getApihubConfigRaw(configView *view.BranchGitConfigView) ([]byte, error) { + return json.MarshalIndent(configView, "", " ") +} + +func getContentDataFromGit(ctx goctx.Context, client client.GitClient, projectGitId string, ref string, fileId string) (*view.ContentData, error) { + // TODO: should be context from the request + goCtx := context.CreateContextWithStacktrace(ctx, fmt.Sprintf("getContentDataFromGit(%s,%s,%s)", projectGitId, ref, fileId)) + + data, responseType, blobId, err := client.GetFileContent(goCtx, projectGitId, ref, fileId) + if err != nil { + return nil, err + } + + if data == nil && responseType == "" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.FileByRefNotFound, + Message: exception.FileByRefNotFoundMsg, + Params: map[string]interface{}{ + "fileId": fileId, + "ref": ref, + "projectGitId": projectGitId}, + } + } + dataType := getMediaType(data) + return &view.ContentData{FileId: fileId, Data: data, DataType: dataType, BlobId: blobId}, nil +} + +func getMediaType(data []byte) string { + return http.DetectContentType(data) +} + +func validateFileInfo(fileId string, filePath string, fileName 
string) error { + if strings.Contains(fileId, "//") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectFilePath, + Message: exception.IncorrectFilePathMsg, + Params: map[string]interface{}{"path": fileId}, + } + } + if strings.Contains(fileName, "/") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectFileName, + Message: exception.IncorrectFileNameMsg, + Params: map[string]interface{}{"name": fileName}, + } + } + if strings.Contains(filePath, "//") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectFilePath, + Message: exception.IncorrectFilePathMsg, + Params: map[string]interface{}{"path": filePath}, + } + } + + return nil +} + +func generateFileId(filePath string, fileName string) string { + filePath = utils.NormalizeFilePath(filePath) + fileId := utils.ConcatToFileId(filePath, fileName) + + return fileId +} + +func checkAvailability(fileId string, fileIds map[string]bool, folders map[string]bool) error { + if fileIds[fileId] { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileIdIsTaken, + Message: exception.FileIdIsTakenMsg, + Params: map[string]interface{}{"fileId": fileId}, + } + } + + path, _ := utils.SplitFileId(fileId) + //check if we have a folder which is a file in fileId + for folder := range folders { + if !strings.HasPrefix(folder, "/") { + folder = folder + "/" + } + if strings.HasPrefix(folder, fileId+"/") { + //directory of a new file for error message + dir := path + if dir == "" { + dir = "Root directory" + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NameAlreadyTaken, + Message: exception.NameAlreadyTakenMsg, + Params: map[string]interface{}{"name": fileId, "directory": dir}, + } + } + } + //check if we have a file which is a folder in fileId + for file := range fileIds { + if strings.HasPrefix(fileId, file+"/") { + //directory of an 
existing file for error message + dir, _ := utils.SplitFileId(file) + if dir == "" { + dir = "Root directory" + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NameAlreadyTaken, + Message: exception.NameAlreadyTakenMsg, + Params: map[string]interface{}{"name": file, "directory": dir}, + } + } + } + + return nil +} + +func getContentType(filePath string, data *[]byte) view.ShortcutType { + contentType, _ := GetContentInfo(filePath, data) + return contentType +} + +func GetContentInfo(filePath string, data *[]byte) (view.ShortcutType, string) { + switch strings.ToUpper(filepath.Ext(filepath.Base(filePath))) { + case ".JSON": + return getJsonContentInfo(data) + case ".YAML", ".YML": + return getYamlContentType(data) + case ".MD", ".MARKDOWN": + return view.MD, "" + case ".GRAPHQL", ".GQL": + return view.GraphQLSchema, "" + default: + return view.Unknown, "" + } +} + +type jsonMap map[string]interface{} + +func (j jsonMap) contains(key string) bool { + if _, ok := j[key]; ok { + return true + } + return false +} + +func (j jsonMap) getString(key string) string { + if val, ok := j[key]; ok { + return fmt.Sprint(val) + } + return "" +} + +func (j jsonMap) getObject(key string) jsonMap { + if obj, isObj := j[key].(map[string]interface{}); isObj { + return obj + } + return jsonMap{} +} + +func (j jsonMap) getValueAsString(key string) string { + if _, isObj := j[key].(map[string]interface{}); isObj { + return "" + } + if _, isArr := j[key].([]interface{}); isArr { + return "" + } + if val, ok := j[key]; ok { + return fmt.Sprint(val) + } + + return "" +} + +var openapi20JsonRegexp = regexp.MustCompile(`2.*`) +var openapi30JsonRegexp = regexp.MustCompile(`3.0.*`) +var openapi31JsonRegexp = regexp.MustCompile(`3.1.*`) + +func getJsonContentInfo(data *[]byte) (view.ShortcutType, string) { + var contentJson jsonMap + json.Unmarshal(*data, &contentJson) + + contentType := view.Unknown + contentTitle := "" + if contentJson.contains("graphapi") 
{ + return view.GraphAPI, "" + } + if contentJson.getObject("data").contains("__schema") { + return view.Introspection, "" + } + hasInfo := contentJson.contains("info") + openapiValue := contentJson.getValueAsString("openapi") + swaggerValue := contentJson.getValueAsString("swagger") + if (openapiValue != "" || swaggerValue != "") && hasInfo && contentJson.contains("paths") { + if openapi30JsonRegexp.MatchString(openapiValue) { + contentType = view.OpenAPI30 + } + if contentType == view.Unknown && openapi31JsonRegexp.MatchString(openapiValue) { + contentType = view.OpenAPI31 + } + if contentType == view.Unknown && openapi20JsonRegexp.MatchString(swaggerValue) { + contentType = view.OpenAPI20 + } + } else if contentJson.contains("asyncapi") && hasInfo { + contentType = view.AsyncAPI + } else if schemaType := contentJson.getString("type"); schemaType != "" { + //goland:noinspection ALL + correctType, _ := regexp.MatchString("(string|number|object|array|boolean|null){1}", schemaType) + if correctType { + contentType = view.JsonSchema + } + } + + if contentType != "" && hasInfo { + infoJson := contentJson.getObject("info") + contentTitle = infoJson.getValueAsString("title") + } + return contentType, contentTitle +} + +//goland:noinspection RegExpDuplicateCharacterInClass +var openapi30YamlRegexp = regexp.MustCompile(`^['|"]?openapi['|"]?\s*:\s*['|"]?3.0(.\d)*['|"]?.*`) +var openapi31YamlRegexp = regexp.MustCompile(`^['|"]?openapi['|"]?\s*:\s*['|"]?3.1(.\d)*['|"]?.*`) +var openapi2YamlRegexp = regexp.MustCompile(`^['|"]?swagger['|"]?\s*:\s*['|"]?2(.\d)*['|"]?.*`) +var asyncapi2YamlRegexp = regexp.MustCompile(`^['|"]?asyncapi['|"]?\s*:\s*['|"]?2(.\d)*['|"]?.*`) +var infoYamlRegexp = regexp.MustCompile(`^['|"]?info['|"]?\s*:.*`) +var pathsYamlRegexp = regexp.MustCompile(`^['|"]?paths['|"]?\s*:.*`) +var jsonSchemaYamlRegexp = regexp.MustCompile(`^['|"]?type['|"]?\s*:\s*['|"]?(string|number|object|array|boolean|null){1}['|"]?.*`) +var yamlTitleRegexp = 
regexp.MustCompile(`^[\s]{1,2}['|"]?title['|"]?\s*:\s*['|"]?(.+?)['|"]?$`) + +func getYamlContentType(data *[]byte) (view.ShortcutType, string) { + var isOpenapi, + hasOpenapi30Tag, + hasOpenapi31Tag, + hasOpenapi2Tag, + hasAsyncapi2Tag, + hasInfoTag, + hasPathsTag, + isJsonSchema bool + reader := bytes.NewReader(*data) + scanner := bufio.NewScanner(reader) + contentType := view.Unknown + contentTitle := "" + + for scanner.Scan() { + text := scanner.Text() + if text == "" { + continue + } + if !strings.HasPrefix(text, " ") { + hasInfoTag = hasInfoTag || infoYamlRegexp.MatchString(text) + hasPathsTag = hasPathsTag || pathsYamlRegexp.MatchString(text) + isJsonSchema = isJsonSchema || jsonSchemaYamlRegexp.MatchString(text) + + // try to find content Type + if contentType == view.Unknown { + if !isOpenapi && !hasAsyncapi2Tag { + hasOpenapi30Tag = openapi30YamlRegexp.MatchString(text) + hasOpenapi31Tag = openapi31YamlRegexp.MatchString(text) + hasOpenapi2Tag = openapi2YamlRegexp.MatchString(text) + isOpenapi = hasOpenapi30Tag || hasOpenapi31Tag || hasOpenapi2Tag + + hasAsyncapi2Tag = asyncapi2YamlRegexp.MatchString(text) + } + if isOpenapi && hasInfoTag && hasPathsTag { + if hasOpenapi2Tag { + contentType = view.OpenAPI20 + } + if hasOpenapi30Tag { + contentType = view.OpenAPI30 + } + if hasOpenapi31Tag { + contentType = view.OpenAPI31 + } + } + if hasAsyncapi2Tag && hasInfoTag { + contentType = view.AsyncAPI + } + } + } + //try to find content Title + if hasInfoTag && contentTitle == "" { + parts := yamlTitleRegexp.FindStringSubmatch(text) + for _, title := range parts { + contentTitle = title + } + } + } + if isJsonSchema { + contentType = view.JsonSchema + } + + return contentType, contentTitle +} + +func equalStringSets(first []string, second []string) bool { + if len(first) != len(second) { + return false + } + exists := make(map[string]bool) + for _, value := range first { + exists[value] = true + } + for _, value := range second { + if !exists[value] { + return 
false + } + } + return true +} + +func convertEol(data []byte) []byte { + convertedData := string(data) + convertedData = strings.Replace(convertedData, "\r\n", "\n", -1) + return []byte(convertedData) +} + +func getOpenapiJsonIdentifiers(data []byte) []string { + identifiers := make([]string, 0) + var document jsonMap + err := json.Unmarshal(data, &document) + if err != nil { + return identifiers + } + pathsObject := document.getObject("paths") + for path := range pathsObject { + identifiers = append(identifiers, getOpenapiEndpointOperations(normalizeEndpointPath(path), pathsObject.getObject(path))...) + } + return identifiers +} + +var openapiOperationsRegexp = regexp.MustCompile(`(get|put|post|delete|options|head|patch|trace){1}`) + +func getOpenapiEndpointOperations(path string, operationsObj jsonMap) []string { + operations := make([]string, 0) + for operation := range operationsObj { + lOperation := strings.ToLower(operation) + if openapiOperationsRegexp.MatchString(lOperation) { + operations = append(operations, lOperation+" "+path) + } + } + return operations +} + +// replaces any {variable} with {*} +func normalizeEndpointPath(path string) string { + if strings.IndexByte(path, '{') < 0 { + return path + } + var result strings.Builder + var isVariable bool + + result.Grow(len(path)) + + for _, char := range path { + if isVariable { + if char == '}' { + //variable end + isVariable = false + + result.WriteRune('*') + result.WriteRune('}') + } + continue + } + if char == '{' { + //variable start + isVariable = true + } + result.WriteRune(char) + } + return result.String() +} diff --git a/qubership-apihub-service/service/ContentUtils_test.go b/qubership-apihub-service/service/ContentUtils_test.go new file mode 100644 index 0000000..7eca16b --- /dev/null +++ b/qubership-apihub-service/service/ContentUtils_test.go @@ -0,0 +1,223 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// 
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package service

import (
	"strings"
	"testing"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils"
	"github.com/stretchr/testify/assert"
)

// TestSplit checks the invariants of utils.SplitFileId: the returned path is
// normalized (no ".", no leading "/", no ".." segments, no "//") and the
// returned name contains no path separator.
func TestSplit(t *testing.T) {
	var fileIds []string
	fileIds = append(fileIds, "fileName.json")
	fileIds = append(fileIds, "/fileName.json")
	fileIds = append(fileIds, "./fileName.json")
	fileIds = append(fileIds, "/./fileName.json")
	fileIds = append(fileIds, "dir/fileName.json")
	fileIds = append(fileIds, "dir/./fileName.json")
	fileIds = append(fileIds, ".dir/./fileName.json")
	fileIds = append(fileIds, "dir/../fileName.json")
	fileIds = append(fileIds, "./dir/../fileName.json")

	for _, fileId := range fileIds {
		path, name := utils.SplitFileId(fileId)
		if path == "." {
			t.Error("File Path after split can't equal to '.'")
		}
		if strings.HasPrefix(path, "/") {
			t.Error("File Path after split can't start from '/'")
		}

		if strings.Contains(name, "/") {
			t.Error("File Name after split can't contain '/'")
		}

		if strings.HasPrefix(path, "../") || strings.Contains(path, "/../") || strings.HasSuffix(path, "/..") {
			t.Error("File Path after split can't contain '..' directories")
		}

		if strings.Contains(path, "//") {
			t.Error("File Path after split can't contain '//'")
		}
	}
}

// TestConcat checks utils.ConcatToFileId: a "/" is inserted between a
// non-empty path and name, while "." / "./" / "/" prefixes are kept verbatim.
func TestConcat(t *testing.T) {
	var fileInfo [][2]string
	fileInfo = append(fileInfo, [2]string{"filePath", "fileName.json"})
	fileInfo = append(fileInfo, [2]string{"", "filePath/fileName.json"})

	for _, fileInfo := range fileInfo {
		fileId := utils.ConcatToFileId(fileInfo[0], fileInfo[1])
		if fileId != "filePath/fileName.json" {
			t.Errorf("Slug after concat should contain '/' between path and name: '%s' and '%s'", fileInfo[0], fileInfo[1])
		}
	}

	fileId := utils.ConcatToFileId(".", "fileName.json")
	if fileId != "./fileName.json" {
		t.Errorf("Slug after concat should contain equal to ./fileName.json")
	}

	// Paths already ending with "/" must be concatenated without an extra "/".
	fileInfo = append([][2]string{}, [2]string{"./", "fileName.json"})
	fileInfo = append(fileInfo, [2]string{"/", "fileName.json"})

	for _, fileInfo := range fileInfo {
		fileId := utils.ConcatToFileId(fileInfo[0], fileInfo[1])
		if fileId != fileInfo[0]+fileInfo[1] {
			t.Errorf("Slug after concat should contain '/' between path and name: '%s' and '%s'", fileInfo[0], fileInfo[1])
		}
	}
}

// TestNormalizationOfFileId checks utils.NormalizeFileId against "/", "./"
// and ".." prefixes and interior ".." segments.
func TestNormalizationOfFileId(t *testing.T) {
	var fileIds []string

	initFileName := "fileName.json"
	initFilePath := "dir"

	// Empty-path variants must normalize to the bare file name.
	fileIds = append(fileIds, initFileName)
	fileIds = append(fileIds, "/"+initFileName)
	fileIds = append(fileIds, "./"+initFileName)

	for _, fileId := range fileIds {
		normFileId := utils.NormalizeFileId(fileId)
		if normFileId != initFileName {
			t.Errorf("Slug normalization works incorrect with empty Path: '%s'", fileId)
		}
	}

	// ".." segments must be resolved away entirely.
	fileIds = append([]string{}, "../"+initFileName)
	fileIds = append(fileIds, "/../"+initFileName)
	fileIds = append(fileIds, "./../"+initFileName)
	fileIds = append(fileIds, "/../../../"+initFileName)
	fileIds = append(fileIds, "/1/../2/../"+initFileName)
	fileIds = append(fileIds, "./1/../2/../"+initFileName)
	fileIds = append(fileIds, "../1/../2/../"+initFileName)

	for _, fileId := range fileIds {
		normFileId := utils.NormalizeFileId(fileId)
		if normFileId != initFileName {
			t.Errorf("Slug normalization works incorrect with '..' directories: '%s'", fileId)
		}
	}

	fileIds = append([]string{}, "../1/../"+initFilePath+"/"+initFileName)

	for _, fileId := range fileIds {
		normFileId := utils.NormalizeFileId(fileId)
		if normFileId != initFilePath+"/"+initFileName {
			t.Errorf("Slug normalization works incorrect with '..' directories: '%s'", fileId)
		}
	}

	// "./" noise anywhere in the id must be stripped.
	fileIds = append([]string{}, initFilePath+"/fileName.json")
	fileIds = append(fileIds, "./././"+initFilePath+"/fileName.json")
	fileIds = append(fileIds, "/"+initFilePath+"/./././fileName.json")

	for _, fileId := range fileIds {
		normFileId := utils.NormalizeFileId(fileId)
		if normFileId != initFilePath+"/"+initFileName {
			t.Errorf("Slug normalization works incorrect with Path prefix: '%s'", fileId)
		}
	}
}

// TestNormalizationOfFilePath checks utils.NormalizeFilePath: empty-ish
// paths normalize to "", trailing "/" is dropped, ".." segments resolve.
func TestNormalizationOfFilePath(t *testing.T) {
	var paths []string

	initFilePath := "dir"

	paths = append(paths, "")
	paths = append(paths, "/")
	paths = append(paths, "./")

	for _, path := range paths {
		normFileId := utils.NormalizeFilePath(path)
		if normFileId != "" {
			t.Errorf("File path normalization works incorrect with empty Path: '%s'", path)
		}
	}

	paths = append([]string{}, "../")
	paths = append(paths, "/../")
	paths = append(paths, "./../")
	paths = append(paths, "/../../../")
	paths = append(paths, "/1/../2/../")
	paths = append(paths, "./1/../2/../")
	paths = append(paths, "../1/../2/../")

	for _, path := range paths {
		normFileId := utils.NormalizeFilePath(path)
		if normFileId != "" {
			t.Errorf("File path normalization works incorrect with '..' directories: '%s'", path)
		}
	}

	// NOTE(review): each append below resets the slice, so only the last
	// variant is actually exercised in this loop — possibly unintended.
	paths = append([]string{}, "/"+initFilePath+"/")
	paths = append([]string{}, "./"+initFilePath+"/")
	paths = append([]string{}, ""+initFilePath+"/")

	for _, path := range paths {
		normFileId := utils.NormalizeFilePath(path)
		if normFileId != initFilePath {
			t.Errorf("File path normalization works incorrect with '/' suffix: '%s'", path)
		}
	}

	paths = append([]string{}, "../1/../"+initFilePath)

	for _, path := range paths {
		normFileId := utils.NormalizeFilePath(path)
		if normFileId != initFilePath {
			t.Errorf("File path normalization works incorrect with '..' directories: '%s'", path)
		}
	}
}

// TestCheckAvailability checks the file/folder collision rules enforced by
// checkAvailability against a fixed set of existing files and folders.
func TestCheckAvailability(t *testing.T) {
	folders := make(map[string]bool)
	folders["2021.4/worklog/"] = true
	folders["2021.4/gsmtmf/"] = true
	folders["2020.4/acmbi/"] = true
	folders["2020.4/cmp/tmf621/"] = true
	folders["apihub-config/"] = true
	folders["newfolder/"] = true
	folders["other/"] = true
	folders["/"] = true

	files := make(map[string]bool)
	files["2021.4/worklog/worklog.md"] = true
	files["2021.4/gsmtmf/gsmtmf.md"] = true
	files["2020.4/acmbi/acmbi.md"] = true
	files["2020.4/cmp/tmf621/tmf.md"] = true
	files["apihub-config/config.md"] = true
	files["newfolder/new.md"] = true
	files["other/other.md"] = true
	files["README.md"] = true

	assert.Error(t, checkAvailability("README.md", files, folders))
	assert.Error(t, checkAvailability("README.md/qwerty.md", files, folders))
	assert.Error(t, checkAvailability("other/other.md/qwerty.md", files, folders)) //gitlab allows this but it deletes 'other.md' file
	assert.Error(t, checkAvailability("2021.4", files, folders))

	assert.NoError(t, checkAvailability("2021.4/qwerty.md", files, folders))
	assert.NoError(t, checkAvailability("2021.4/worklog/qwerty.md", files, folders))
	assert.NoError(t, checkAvailability("2021.5", files, folders))
	assert.NoError(t, checkAvailability("readme.md", files, folders))
	assert.NoError(t, checkAvailability("readme.md/qwerty.md", files, folders)) //gitlab allows this
}

// ---- qubership-apihub-service/service/DraftContentService.go ----
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package service

import (
	"archive/zip"
	"bytes"
	goctx "context"
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"mime"
	"net/http"
	"path"
	"strings"
	"sync"
	"time"

	log "github.com/sirupsen/logrus"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// DraftContentService manages file contents of a project branch draft:
// creating, editing, moving, excluding, deleting and restoring draft files,
// falling back to git when a file is not yet materialized in the draft.
type DraftContentService interface {
	CreateDraftContentWithData(ctx context.SecurityContext, projectId string, branchName string, contents []view.Content, contentData []view.ContentData) ([]string, error)
	GetContentFromDraftOrGit(ctx context.SecurityContext, projectId string, branchName string, contentId string) (*view.ContentData, error)
	UpdateDraftContentData(ctx context.SecurityContext, projectId string, branchName string, contentId string, data []byte) error
	ChangeFileId(ctx context.SecurityContext, projectId string, branchName string, fileId string, newFileId string) error
	ExcludeFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error
	DeleteFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error
	AddGitFiles(ctx context.SecurityContext, projectId string, branchName string, paths []string, publish bool) ([]string, error)
	AddFileFromUrl(ctx context.SecurityContext, projectId string, branchName string, url string, filePath string, publish bool) ([]string, error)
	AddEmptyFile(ctx context.SecurityContext, projectId string, branchName string, name string, fileType view.ShortcutType, filePath string, publish bool) ([]string, error)
	UpdateMetadata(ctx context.SecurityContext, projectId string, branchName string, path string, metaPatch view.ContentMetaPatch, bulk bool) error
	ResetFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error
	RestoreFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error
	GetAllZippedContentFromDraftOrGit(ctx context.SecurityContext, projectId string, branchName string) ([]byte, error)
}

// NewContentService wires a DraftContentService implementation from its
// repository, sibling services and the websocket notifier.
func NewContentService(draftRepository repository.DraftRepository,
	projectService ProjectService,
	branchService BranchService,
	gitClientProvider GitClientProvider,
	websocketService WsBranchService,
	templateService TemplateService,
	systemInfoService SystemInfoService) DraftContentService {
	return &draftContentServiceImpl{
		draftRepository:   draftRepository,
		projectService:    projectService,
		branchService:     branchService,
		gitClientProvider: gitClientProvider,
		websocketService:  websocketService,
		templateService:   templateService,
		systemInfoService: systemInfoService,
	}
}

// draftContentServiceImpl is the default DraftContentService implementation.
type draftContentServiceImpl struct {
	draftRepository   repository.DraftRepository
	projectService    ProjectService
	branchService     BranchService
	gitClientProvider GitClientProvider
	websocketService  WsBranchService
	templateService   TemplateService
	systemInfoService SystemInfoService
}

// CreateDraftContentWithData adds new files (metadata + raw data) to the
// branch draft, creating the draft from git first if needed. It validates
// and normalizes each fileId, rejects collisions with existing files and
// folders, stores the entities, notifies branch websocket subscribers about
// each added file, and returns the resulting fileIds in input order.
func (c draftContentServiceImpl) CreateDraftContentWithData(ctx context.SecurityContext, projectId string, branchName string, contents []view.Content, contentData []view.ContentData) ([]string, error) {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("CreateDraftContentWithData(%s,%s,%+v,...)", projectId, branchName, contents))

	var err error
	var resultFileIds []string

	fileIds := make(map[string]bool)
	folders := make(map[string]bool)

	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return nil, err
	}
	if !draftExists {
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return nil, err
		}
	}
	branch, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false)
	if err != nil {
		return nil, err
	}
	branch.RemoveFolders()
	// Index existing draft files and their parent folders for collision checks.
	for _, file := range branch.Files {
		fileIds[file.FileId] = true
		folders[file.Path+"/"] = true
	}

	var contentEnts []*entity.ContentDraftEntity

	for index, content := range contents {
		// Either derive the fileId from path+name, or validate/normalize the
		// one provided by the caller.
		if content.FileId == "" {
			if validationErr := validateFileInfo("", content.Path, content.Name); validationErr != nil {
				return nil, validationErr
			}
			content.FileId = generateFileId(content.Path, content.Name)
		} else {
			if validationErr := validateFileInfo(content.FileId, "", ""); validationErr != nil {
				return nil, validationErr
			}
			content.FileId = utils.NormalizeFileId(content.FileId)
		}

		err = checkAvailability(content.FileId, fileIds, folders)
		if err != nil {
			return nil, err
		}
		content.Path, content.Name = utils.SplitFileId(content.FileId)

		// Detect the spec type from the data when the caller did not set one.
		if content.Type == "" {
			content.Type = getContentType(content.FileId, &contentData[index].Data)
		}

		lastIndex := len(fileIds)
		// Normalize CRLF to LF for plain-text payloads only.
		var preparedData []byte
		if strings.Contains(getMediaType(contentData[index].Data), "text/plain") {
			preparedData = convertEol(contentData[index].Data)
		} else {
			preparedData = contentData[index].Data
		}
		ent := entity.MakeContentEntity(&content, lastIndex, projectId, branchName, preparedData, getMediaType(contentData[index].Data), string(view.StatusAdded))

		contentEnts = append(contentEnts, ent)

		fileIds[content.FileId] = true
		resultFileIds = append(resultFileIds, content.FileId)

	}

	err = c.draftRepository.SetContents(contentEnts)
	if err != nil {
		return nil, err
	}
	// Notify branch subscribers about every added file.
	defaultPublish := true
	emptyStr := ""
	for _, fileId := range resultFileIds {
		c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
			websocket.BranchFilesUpdatedPatch{
				Type:      websocket.BranchFilesUpdatedType,
				UserId:    ctx.GetUserId(),
				Operation: "add",
				Data: &websocket.BranchFilesUpdatedPatchData{
					FileId:     fileId,
					Publish:    &defaultPublish,
					BlobId:     &emptyStr,
					ChangeType: view.CTAdded,
					Status:     view.StatusAdded},
			})
	}
	err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName)
	if err != nil {
		return nil, err
	}

	return resultFileIds, nil
}

// GetContentFromDraftOrGit returns a file's data from the draft when it is
// stored there, otherwise resolves the file's blob from git (creating the
// draft first if needed). Returns a 404-style error when the file is unknown
// or has no blob.
func (c draftContentServiceImpl) GetContentFromDraftOrGit(ctx context.SecurityContext, projectId string, branchName string, contentId string) (*view.ContentData, error) {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetContentFromDraftOrGit(%s,%s,%s)", projectId, branchName, contentId))

	ent, err := c.draftRepository.GetContentWithData(projectId, branchName, contentId)
	if err != nil {
		return nil, err
	}
	if ent != nil {
		if ent.BlobId == "" || ent.Data != nil {
			// get data from draft
			return entity.MakeContentDataView(ent), err
		}
		// otherwise, get data from git
	}
	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return nil, err
	}
	if !draftExists {
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return nil, err
		}
	}
	branch, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false)
	if err != nil {
		return nil, err
	}
	branch.RemoveFolders()
	// Locate the requested file in the branch config.
	var content view.Content
	for _, cont := range branch.Files {
		if cont.FileId == contentId {
			content = cont
			break
		}
	}
	if content.FileId == "" || content.BlobId == "" {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ContentIdNotFound,
			Message: exception.ContentIdNotFoundMsg,
			Params: map[string]interface{}{
				"contentId": contentId,
				"branch":    branchName,
				"projectId": projectId},
		}
	}
	return c.updateUnsavedContentDataFromGit(ctx, projectId, branchName, content)
}

// UpdateDraftContentData stores new data for a draft file, recalculating its
// status (added/modified) and notifying branch subscribers. Deleted and
// excluded files cannot be modified.
func (c draftContentServiceImpl) UpdateDraftContentData(ctx context.SecurityContext, projectId string, branchName string, fileId string, data []byte) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("UpdateDraftContentData(%s,%s,%s,...)", projectId, branchName, fileId))

	// TODO: temporary workaround for a frontend issue
	if fileId == "undefined" {
		return fmt.Errorf("incorrect content id: undefined")
	}

	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}
	if !draftExists {
		// sendNotification = true // branch file updated event should be sent on first draft edit(add content to draft)
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}

	file, err := c.draftRepository.GetContent(projectId, branchName, fileId)
	if err != nil {
		return err
	}
	if file == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ContentIdNotFound,
			Message: exception.ContentIdNotFoundMsg,
			Params:  map[string]interface{}{"contentId": fileId, "branch": branchName, "projectId": projectId},
		}
	}

	sendPatch := false
	if file.Status == string(view.StatusUnmodified) || file.Status == string(view.StatusMoved) || file.Status == string(view.StatusIncluded) {
		sendPatch = true // branch file updated event should be sent on first file edit
	}

	status := view.StatusModified
	fileStatus := view.ParseFileStatus(file.Status)
	patchData := &websocket.BranchFilesUpdatedPatchData{ChangeType: view.CTUpdated}

	switch fileStatus {
	case view.StatusAdded, view.StatusModified:
		{
			// Keep the stronger existing status (an added file stays "added").
			status = fileStatus
		}
	case view.StatusDeleted, view.StatusExcluded:
		{
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.NotApplicableOperation,
				Message: exception.NotApplicableOperationMsg,
				Params:  map[string]interface{}{"operation": "modify", "status": fileStatus},
			}
		}
	}

	patchData.Status = status
	err = c.draftRepository.UpdateContentData(projectId, branchName, fileId, data, getMediaType(data), string(status), file.BlobId)
	if err != nil {
		return err
	}
	c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
		websocket.BranchFilesDataModified{
			Type:   websocket.BranchFilesDataModifiedType,
			UserId: ctx.GetUserId(),
			FileId: file.FileId,
		})

	if sendPatch && fileStatus != status {
		c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
			websocket.BranchFilesUpdatedPatch{
				Type:      websocket.BranchFilesUpdatedType,
				UserId:    ctx.GetUserId(),
				Operation: "patch",
				FileId:    file.FileId,
				Data:      patchData,
			})
	}
	return nil
}

// ChangeFileId renames/moves a draft file to newFileId, recording where it
// was moved from, validating collisions with existing files and folders, and
// notifying branch subscribers. Deleted and excluded files cannot be moved.
func (c draftContentServiceImpl) ChangeFileId(ctx context.SecurityContext, projectId string, branchName string, fileId string, newFileId string) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ChangeFileId(%s,%s,%s,%s)", projectId, branchName, fileId, newFileId))

	if validationErr := validateFileInfo(newFileId, "", ""); validationErr != nil {
		return validationErr
	}
	newFileId = utils.NormalizeFileId(newFileId)

	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}

	if !draftExists {
		if err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName); err != nil {
			return err
		}
	}

	branchDetails, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false)
	if err != nil {
		return err
	}
	branchDetails.RemoveFolders()

	// Find the file being moved and index all other files/folders for the
	// availability check of the new id.
	var file *view.Content
	var index int
	fileIds := make(map[string]bool)
	folders := make(map[string]bool)
	for i, f := range branchDetails.Files {
		if f.FileId == fileId {
			tmp := f
			file = &tmp
			index = i
			continue
		}

		fileIds[f.FileId] = true
		folders[f.Path+"/"] = true
	}
	if file == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.FileNotFound,
			Message: exception.FileNotFoundMsg,
			Params:  map[string]interface{}{"fileId": fileId, "branch": branchName, "projectGitId": projectId},
		}
	}
	err = checkAvailability(newFileId, fileIds, folders)
	if err != nil {
		return err
	}

	patchData := &websocket.BranchFilesUpdatedPatchData{FileId: newFileId}

	status := view.StatusMoved
	fileStatus := file.Status
	switch fileStatus {
	case view.StatusAdded:
		{
			// A freshly added file keeps "added"; there is no origin to track.
			status = fileStatus
			file.MovedFrom = ""
		}
	case view.StatusModified:
		{
			status = fileStatus
			if file.MovedFrom == "" {
				file.MovedFrom = fileId
				patchData.MovedFrom = &fileId
			}
		}
	case view.StatusUnmodified, view.StatusIncluded:
		{
			patchData.Status = status
			patchData.ChangeType = view.CTUpdated
			if file.MovedFrom == "" {
				file.MovedFrom = fileId
				patchData.MovedFrom = &fileId
			}
		}
	case view.StatusExcluded, view.StatusDeleted:
		{
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.NotApplicableOperation,
				Message: exception.NotApplicableOperationMsg,
				Params:  map[string]interface{}{"operation": "rename/move", "status": file.Status},
			}
		}
	}

	// Materialize the data (from draft or git) before re-saving under the new id.
	fileData, err := c.GetContentFromDraftOrGit(ctx, projectId, branchName, fileId)
	if err != nil {
		return err
	}
	file.FileId = newFileId
	file.Path, file.Name = utils.SplitFileId(newFileId)
	file.FromFolder = false

	ent := entity.MakeContentEntity(file, index, projectId, branchName, fileData.Data, getMediaType(fileData.Data), string(status))
	err = c.draftRepository.ReplaceContent(projectId, branchName, fileId, ent)
	if err != nil {
		return err
	}
	c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
		websocket.BranchFilesUpdatedPatch{
			Type:      websocket.BranchFilesUpdatedType,
			UserId:    ctx.GetUserId(),
			Operation: "patch",
			FileId:    fileId,
			Data:      patchData,
		})
	err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName)
	if err != nil {
		return err
	}

	return nil
}

// ExcludeFile marks a draft file as excluded from the branch config (without
// deleting it from git). When fileId is a folder, all non-excluded files
// under it are excluded recursively. Branch subscribers are notified.
func (c draftContentServiceImpl) ExcludeFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ExcludeFile(%s,%s,%s)", projectId, branchName, fileId))

	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}

	if !draftExists {
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}
	fileFromDraft, err := c.draftRepository.GetContent(projectId, branchName, fileId)
	if err != nil {
		return err
	}
	if fileFromDraft == nil {
		// check if it's a folder
		files, err := c.draftRepository.GetContents(projectId, branchName)
		if err != nil {
			return err
		}
		var toDelete []string
		for _, file := range files {
			if strings.HasPrefix(file.FileId, fileId) && (file.Status != view.StatusExcluded.String()) {
				toDelete = append(toDelete, file.FileId)
			}
		}
		if len(toDelete) == 0 {
			return &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.NoContentToDelete,
				Message: exception.NoContentToDeleteMsg,
				Params:  map[string]interface{}{"contentId": fileId, "branch": branchName, "projectId": projectId},
			}
		}
		// Recurse per matched file.
		for _, file := range toDelete {
			err = c.ExcludeFile(ctx, projectId, branchName, file)
			if err != nil {
				return err
			}
		}
		return nil
	}

	fileStatus := view.ParseFileStatus(fileFromDraft.Status)
	lastStatus := string(fileStatus)
	patchData := &websocket.BranchFilesUpdatedPatchData{Status: view.StatusExcluded}

	switch fileStatus {
	case view.StatusAdded, view.StatusModified:
		{
			patchData.ChangeType = view.CTUnchanged
		}
	case view.StatusDeleted:
		{
			// Preserve the status the file had before it was deleted.
			lastStatus = fileFromDraft.LastStatus
			patchData.ChangeType = view.CTUnchanged
		}
	case view.StatusExcluded:
		{
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.NotApplicableOperation,
				Message: exception.NotApplicableOperationMsg,
				Params:  map[string]interface{}{"operation": "exclude from config", "status": fileStatus},
			}
		}
	}

	err = c.draftRepository.UpdateContentStatus(projectId, branchName, fileId, string(view.StatusExcluded), lastStatus)
	if err != nil {
		return err
	}

	c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
		websocket.BranchFilesUpdatedPatch{
			Type:      websocket.BranchFilesUpdatedType,
			UserId:    ctx.GetUserId(),
			Operation: "patch",
			FileId:    fileId,
			Data:      patchData,
		})
	err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName)
	if err != nil {
		return err
	}
	return nil
}

// DeleteFile marks a draft file as deleted (to be removed from git on
// publish). When fileId is a folder, all non-deleted files under it are
// deleted recursively.
func (c draftContentServiceImpl) DeleteFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("DeleteFile(%s,%s,%s)", projectId, branchName, fileId))

	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}

	if !draftExists {
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}
	fileFromDraft, err := c.draftRepository.GetContent(projectId, branchName, fileId)
	if err != nil {
		return err
	}
	if fileFromDraft == nil {
		// check if it's a folder
		files, err := c.draftRepository.GetContents(projectId, branchName)
		if err != nil {
			return err
		}
		var toDelete []string
		for _, file := range files {
			if strings.HasPrefix(file.FileId, fileId) && (file.Status != view.StatusDeleted.String()) {

				toDelete = append(toDelete, file.FileId)
			}
		}
		if len(toDelete) == 0 {
			return &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.NoContentToDelete,
				Message: exception.NoContentToDeleteMsg,
				Params:  map[string]interface{}{"contentId": fileId, "branch": branchName, "projectId": projectId},
			}
		}
		for _, file := range toDelete {
			err = c.DeleteFile(ctx, projectId, branchName, file)
			if err != nil {
				return err
			}
		}
		return nil
	}

	fileStatus := view.ParseFileStatus(fileFromDraft.Status)
	lastStatus := string(fileStatus)
	patchData := &websocket.BranchFilesUpdatedPatchData{Status: view.StatusDeleted}

	switch fileStatus {
	case view.StatusIncluded, view.StatusMoved, view.StatusUnmodified,
view.StatusModified: + { + patchData.ChangeType = view.CTDeleted + } + case view.StatusExcluded: + { + lastStatus = fileFromDraft.LastStatus + patchData.ChangeType = view.CTDeleted + } + case view.StatusAdded, view.StatusDeleted: + { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotApplicableOperation, + Message: exception.NotApplicableOperationMsg, + Params: map[string]interface{}{"operation": "delete from git", "status": fileStatus}, + } + } + } + + err = c.draftRepository.UpdateContentStatus(projectId, branchName, fileId, string(view.StatusDeleted), lastStatus) + if err != nil { + return err + } + + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchFilesUpdatedPatch{ + Type: websocket.BranchFilesUpdatedType, + UserId: ctx.GetUserId(), + Operation: "patch", + FileId: fileId, + Data: patchData, + }) + err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName) + if err != nil { + return err + } + return nil +} + +func (c draftContentServiceImpl) AddGitFiles(ctx context.SecurityContext, projectId string, branchName string, paths []string, publish bool) ([]string, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("AddGitFiles(%s,%s,%+v,%t)", projectId, branchName, paths, publish)) + + draftExists, err := c.branchService.DraftExists(projectId, branchName) + if err != nil { + return nil, err + } + if !draftExists { + err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName) + if err != nil { + return nil, err + } + } + + project, err := c.projectService.GetProject(ctx, projectId) + if err != nil { + return nil, err + } + gitClient, err := c.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + newFiles := 
make(map[string]*view.Content, 0) + entities := make([]*entity.ContentDraftEntity, 0) + branchFiles := make(map[string]view.Content) + fileIds := make(map[string]bool) + folders := make(map[string]bool) + + branch, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false) + if err != nil { + return nil, err + } + for _, file := range branch.Files { + branchFiles[file.FileId] = file + fileIds[file.FileId] = true + folders[file.Path+"/"] = true + } + lastIndex := len(branchFiles) + patches := make([]websocket.BranchFilesUpdatedPatch, 0) + + filesFromFolders := make(map[string]bool, 0) + folderPaths := make([]string, 0) + originalPaths := make(map[string]bool, 0) + for _, path := range paths { + originalPaths[path] = true + if strings.HasSuffix(path, "/") { + fileIdsFromFolder, err := gitClient.ListDirectoryFilesRecursive(goCtx, project.Integration.RepositoryId, branchName, path) + if err != nil { + return nil, err + } + for _, fileId := range fileIdsFromFolder { + if fileIds[fileId] { + continue + } + filesFromFolders[fileId] = true + paths = append(paths, fileId) + } + folderPaths = append(folderPaths, path) + } + } + for _, folderFileId := range folderPaths { + if validationErr := validateFileInfo(folderFileId, "", ""); validationErr != nil { + return nil, validationErr + } + folderFileId = utils.NormalizeFileId(folderFileId) + if fileIds[folderFileId] { + continue + } + content := view.Content{FileId: folderFileId, Publish: false, Included: true, IsFolder: true} + content.Path, content.Name = utils.SplitFileId(content.FileId) + lastIndex++ + ent := entity.MakeContentEntity(&content, lastIndex, projectId, branchName, []byte{}, "text/plain", string(view.StatusIncluded)) + entities = append(entities, ent) + } + for _, fileId := range paths { + if strings.HasSuffix(fileId, "/") { + continue + } + if validationErr := validateFileInfo(fileId, "", ""); validationErr != nil { + return nil, validationErr + } + fileId = 
utils.NormalizeFileId(fileId) + gitContentData, err := getContentDataFromGit(goCtx, gitClient, project.Integration.RepositoryId, branchName, fileId) + if err != nil { + return nil, err + } + if f, inBranch := branchFiles[fileId]; inBranch { + file, err := c.draftRepository.GetContent(projectId, branchName, f.FileId) + if err != nil { + return nil, err + } + if file == nil { + continue + } + fileStatus := view.ParseFileStatus(file.Status) + if fileStatus == view.StatusExcluded || fileStatus == view.StatusDeleted { + fileLastStatus := view.ParseFileStatus(file.LastStatus) + status := view.StatusUnmodified + patchData := &websocket.BranchFilesUpdatedPatchData{ChangeType: view.CTUnchanged} + if fileStatus == view.StatusDeleted || fileLastStatus == view.StatusIncluded || fileLastStatus == view.StatusAdded { + status = view.StatusIncluded + } + if filesFromFolders[file.FileId] && len(file.Labels) == 0 && !file.Publish { + file.FromFolder = true + } + patchData.Status = status + file.LastStatus = "" + if file.MovedFrom != "" { + file.MovedFrom = "" + patchData.MovedFrom = &file.MovedFrom + } + if file.BlobId != gitContentData.BlobId { + file.BlobId = gitContentData.BlobId + patchData.BlobId = &file.BlobId + } + file.Status = string(status) + file.Data = gitContentData.Data + entities = append(entities, file) + patches = append(patches, websocket.BranchFilesUpdatedPatch{ + Type: websocket.BranchFilesUpdatedType, + UserId: ctx.GetUserId(), + FileId: file.FileId, + Operation: "patch", + Data: patchData, + }) + } + continue + } + + err = checkAvailability(fileId, fileIds, folders) + if err != nil { + return nil, err + } + + content := view.Content{FileId: fileId, Publish: publish, Included: true} + // if file was imported from folder and original list doesn't contain this file + if !originalPaths[fileId] && filesFromFolders[fileId] { + _, alreadyImported := newFiles[fileId] + if alreadyImported { + continue + } + content.Publish = false + content.FromFolder = true + } + 
content.Path, content.Name = utils.SplitFileId(content.FileId) + content.BlobId = gitContentData.BlobId + lastIndex++ + ent := entity.MakeContentEntity(&content, lastIndex, projectId, branchName, gitContentData.Data, getMediaType(gitContentData.Data), string(view.StatusIncluded)) + entities = append(entities, ent) + newFiles[content.FileId] = &content + + } + + err = c.draftRepository.SetContents(entities) + if err != nil { + return nil, err + } + for _, patch := range patches { + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, patch) + } + + newFileIds := make([]string, 0) + for fileId, file := range newFiles { + newFileIds = append(newFileIds, fileId) + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchFilesUpdatedPatch{ + Type: websocket.BranchFilesUpdatedType, + UserId: ctx.GetUserId(), + Operation: "add", + Data: &websocket.BranchFilesUpdatedPatchData{ + FileId: fileId, + Publish: &file.Publish, + Status: view.StatusIncluded, + ChangeType: view.CTUnchanged, + BlobId: &file.BlobId, + }, + }) + } + err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName) + if err != nil { + return nil, err + } + return newFileIds, nil +} + +func (c draftContentServiceImpl) AddFileFromUrl(ctx context.SecurityContext, projectId string, branchName string, fileUrl string, filePath string, publish bool) ([]string, error) { + files := make([]view.Content, 0) + filesData := make([]view.ContentData, 0) + httpTransport := http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + httpClient := http.Client{Transport: &httpTransport, Timeout: time.Second * 60} + resp, err := httpClient.Get(fileUrl) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UrlUnexpectedErr, + Message: exception.UrlUnexpectedErrMsg, + Debug: err.Error(), + } + } + + if resp.ContentLength > c.systemInfoService.GetPublishFileSizeLimitMB() { + return nil, 
// AddEmptyFile creates a new draft file from the template registered for the
// given file type and adds it to the branch draft via CreateDraftContentWithData.
func (c draftContentServiceImpl) AddEmptyFile(ctx context.SecurityContext, projectId string, branchName string, name string, fileType view.ShortcutType, filePath string, publish bool) ([]string, error) {
	files := make([]view.Content, 0)
	filesData := make([]view.ContentData, 0)
	data := c.templateService.GetFileTemplate(name, string(fileType))
	files = append(files, view.Content{Name: name, Path: filePath, Type: fileType, Publish: publish})
	filesData = append(filesData, view.ContentData{Data: []byte(data)})
	return c.CreateDraftContentWithData(ctx, projectId, branchName, files, filesData)
}

// UpdateMetadata patches the publish flag and/or labels of a single draft file
// (bulk=false, path is a file id) or of every file under a folder path
// (bulk=true). Only fields actually changed trigger persistence, websocket
// "patch" notifications, and a draft change-type recalculation.
func (c draftContentServiceImpl) UpdateMetadata(ctx context.SecurityContext, projectId string, branchName string, path string, metaPatch view.ContentMetaPatch, bulk bool) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("UpdateMetadata(%s,%s,%s,%+v,%t)", projectId, branchName, path, metaPatch, bulk))

	var err error
	draftExists, err := c.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}

	if !draftExists {
		err = c.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}
	// Track, per file id, what actually changed so notifications are minimal.
	fileUpdated := make(map[string]bool, 0)
	publishUpdated := make(map[string]bool, 0)
	labelsUpdated := make(map[string]bool, 0)

	if bulk {
		filesPublishUpdated, filesLabelsUpdated, err := c.updateFolderMetadata(ctx, projectId, branchName, path, metaPatch)
		if err != nil {
			return err
		}
		for _, fileId := range filesPublishUpdated {
			fileUpdated[fileId] = true
			publishUpdated[fileId] = true
		}
		for _, fileId := range filesLabelsUpdated {
			fileUpdated[fileId] = true
			labelsUpdated[fileId] = true
		}
	} else {
		filePublishUpdated, fileLabelsUpdated, err := c.updateFileMetadata(ctx, projectId, branchName, path, metaPatch)
		if err != nil {
			return err
		}
		fileUpdated[path] = filePublishUpdated || fileLabelsUpdated
		publishUpdated[path] = filePublishUpdated
		labelsUpdated[path] = fileLabelsUpdated
	}

	configChanged := false
	for fileId, updated := range fileUpdated {
		if !updated {
			continue
		}
		configChanged = true
		wsMetaUpdatePatchData := &websocket.BranchFilesUpdatedPatchData{}
		if publishUpdated[fileId] {
			wsMetaUpdatePatchData.Publish = metaPatch.Publish
		}
		if labelsUpdated[fileId] {
			wsMetaUpdatePatchData.Labels = metaPatch.Labels
		}
		c.websocketService.NotifyProjectBranchUsers(projectId, branchName,
			websocket.BranchFilesUpdatedPatch{
				Type:      websocket.BranchFilesUpdatedType,
				UserId:    ctx.GetUserId(),
				Operation: "patch",
				FileId:    fileId,
				Data:      wsMetaUpdatePatchData,
			})
	}
	if configChanged {
		err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}
	return nil
}
Operation: "patch", + FileId: fileId, + Data: wsMetaUpdatePatchData, + }) + } + if configChanged { + err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName) + if err != nil { + return err + } + } + return nil +} + +func (c draftContentServiceImpl) updateFileMetadata(ctx context.SecurityContext, projectId string, branchName string, fileId string, metaPatch view.ContentMetaPatch) (bool, bool, error) { + file, err := c.draftRepository.GetContent(projectId, branchName, fileId) + if err != nil { + return false, false, err + } + if file == nil { + return false, false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentIdNotFound, + Message: exception.ContentIdNotFoundMsg, + Params: map[string]interface{}{"contentId": fileId, "branch": branchName, "projectId": projectId}, + } + } + publish := metaPatch.Publish + labels := metaPatch.Labels + publishUpdated := false + labelsUpdated := false + if publish != nil { + if file.Publish != *publish { + publishUpdated = true + file.Publish = *publish + } + } + if labels != nil { + if !equalStringSets(file.Labels, *labels) { + labelsUpdated = true + file.Labels = *labels + } + } + if publishUpdated || labelsUpdated { + if file.FromFolder && + (file.Publish || len(file.Labels) > 0) { + file.FromFolder = false + } + err = c.draftRepository.UpdateContentMetadata(file) + if err != nil { + return false, false, err + } + } + return publishUpdated, labelsUpdated, nil +} + +func (c draftContentServiceImpl) updateFolderMetadata(ctx context.SecurityContext, projectId string, branchName string, path string, metaPatch view.ContentMetaPatch) ([]string, []string, error) { + files, err := c.draftRepository.GetContents(projectId, branchName) + if err != nil { + return nil, nil, err + } + publish := metaPatch.Publish + labels := metaPatch.Labels + filesToUpdate := make([]*entity.ContentDraftEntity, 0) + entitiesToUpdate := make([]*entity.ContentDraftEntity, 0) + publishUpdated := 
make([]string, 0) + labelsUpdated := make([]string, 0) + //if path == "/" change meta for all project files + if path == "/" { + path = "" + } + for _, file := range files { + if !file.IsFolder && strings.HasPrefix(file.Path, path) { + fileTmp := file + filesToUpdate = append(filesToUpdate, &fileTmp) + } + } + if publish != nil { + for _, file := range filesToUpdate { + if file.Publish != *metaPatch.Publish { + publishUpdated = append(publishUpdated, file.FileId) + file.Publish = *publish + entitiesToUpdate = append(entitiesToUpdate, file) + } + } + } + if labels != nil { + for _, file := range filesToUpdate { + if !equalStringSets(file.Labels, *labels) { + labelsUpdated = append(labelsUpdated, file.FileId) + file.Labels = *labels + entitiesToUpdate = append(entitiesToUpdate, file) + } + } + } + if len(entitiesToUpdate) > 0 { + for index := range entitiesToUpdate { + entity := entitiesToUpdate[index] + if entity.FromFolder && + (entity.Publish || len(entity.Labels) > 0) { + entity.FromFolder = false + } + } + err = c.draftRepository.UpdateContentsMetadata(entitiesToUpdate) + } + if err != nil { + return nil, nil, err + } + return publishUpdated, labelsUpdated, nil +} + +func (c draftContentServiceImpl) ResetFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ResetFile(%s,%s,%s)", projectId, branchName, fileId)) + + fileFromDraft, err := c.draftRepository.GetContent(projectId, branchName, fileId) + if err != nil { + return err + } + if fileFromDraft == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.DraftFileNotFound, + Message: exception.DraftFileNotFoundMsg, + Params: map[string]interface{}{"fileId": fileId, "branchName": branchName, "projectId": projectId}, + } + } + project, err := 
c.projectService.GetProject(ctx, projectId) + if err != nil { + return err + } + gitClient, err := c.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return fmt.Errorf("failed to get git client: %v", err) + } + fileStatus := view.ParseFileStatus(fileFromDraft.Status) + status := view.StatusUnmodified + if fileFromDraft.Included { + status = view.StatusIncluded + } + resetData := false + switch fileStatus { + case view.StatusAdded: + { + err = c.draftRepository.DeleteContent(projectId, branchName, fileFromDraft.FileId) + if err != nil { + return err + } + } + case view.StatusModified, view.StatusMoved: + { + if fileFromDraft.MovedFrom != "" { + oldIdTaken, err := c.draftRepository.ContentExists(projectId, branchName, fileFromDraft.MovedFrom) + if err != nil { + return err + } + if oldIdTaken { + err = c.draftRepository.DeleteContent(projectId, branchName, fileFromDraft.FileId) + if err != nil { + return err + } + } else { + branch, err := c.branchService.GetBranchDetailsFromDraft(goCtx, projectId, branchName, false) + if err != nil { + return err + } + branch.RemoveFolders() + fileIds := make(map[string]bool) + folders := make(map[string]bool) + for _, file := range branch.Files { + fileIds[file.FileId] = true + folders[file.Path+"/"] = true + } + + err = checkAvailability(fileFromDraft.MovedFrom, fileIds, folders) + if err != nil { + return err + } + + gitContentData, _, err := gitClient.GetFileContentByBlobId(goCtx, project.Integration.RepositoryId, fileFromDraft.BlobId) + if err != nil { + return err + } + newFileId := fileFromDraft.FileId + fileFromDraft.FileId = fileFromDraft.MovedFrom + fileFromDraft.MovedFrom = "" + fileFromDraft.Path, fileFromDraft.Name = utils.SplitFileId(fileFromDraft.FileId) + fileFromDraft.Data = gitContentData + fileFromDraft.Status = string(status) + fileFromDraft.ConflictedFileId = "" + err = c.draftRepository.ReplaceContent(projectId, branchName, newFileId, fileFromDraft) + if err != nil 
{ + return err + } + } + } else { + resetData = true + } + } + case view.StatusDeleted: + { + resetData = true + if fileFromDraft.LastStatus == string(view.StatusIncluded) { + status = view.StatusIncluded + } + } + case view.StatusIncluded, view.StatusExcluded, view.StatusUnmodified: + { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotApplicableOperation, + Message: exception.NotApplicableOperationMsg, + Params: map[string]interface{}{"operation": "reset", "status": fileStatus}, + } + } + } + + if resetData { + gitContentData, _, err := gitClient.GetFileContentByBlobId(goCtx, project.Integration.RepositoryId, fileFromDraft.BlobId) + if err != nil { + return err + } + fileFromDraft.Data = gitContentData + fileFromDraft.MediaType = getMediaType(gitContentData) + fileFromDraft.Status = string(status) + fileFromDraft.LastStatus = "" + err = c.draftRepository.UpdateContent(fileFromDraft) + if err != nil { + return err + } + } + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchFilesResetPatch{ + Type: websocket.BranchFilesResetType, + UserId: ctx.GetUserId(), + FileId: fileId, + }) + branch, err := c.branchService.GetBranchDetailsEP(goCtx, projectId, branchName, true) + if err != nil { + c.websocketService.DisconnectClients(projectId, branchName) + return err + } + branch.RemoveFolders() + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchConfigSnapshot{ + Type: websocket.BranchConfigSnapshotType, + Data: branch, + }) + err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName) + if err != nil { + return err + } + return nil +} + +func (c draftContentServiceImpl) RestoreFile(ctx context.SecurityContext, projectId string, branchName string, fileId string) error { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, 
fmt.Sprintf("RestoreFile(%s,%s,%s)", projectId, branchName, fileId)) + + fileFromDraft, err := c.draftRepository.GetContent(projectId, branchName, fileId) + if err != nil { + return err + } + if fileFromDraft == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.DraftFileNotFound, + Message: exception.DraftFileNotFoundMsg, + Params: map[string]interface{}{"fileId": fileId, "$branchName": branchName, "$projectId": projectId}, + } + } + fileStatus := view.ParseFileStatus(fileFromDraft.Status) + lastStatus := view.ParseFileStatus(fileFromDraft.LastStatus) + patchData := &websocket.BranchFilesUpdatedPatchData{Status: lastStatus} + + switch fileStatus { + case view.StatusDeleted, view.StatusExcluded: + { + err := c.draftRepository.UpdateContentStatus(projectId, branchName, fileId, fileFromDraft.LastStatus, "") + if err != nil { + return err + } + } + default: + { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotApplicableOperation, + Message: exception.NotApplicableOperationMsg, + Params: map[string]interface{}{"operation": "restore", "status": fileStatus}, + } + } + } + switch patchData.Status { + case view.StatusAdded: + { + patchData.ChangeType = view.CTAdded + } + case view.StatusModified: + { + patchData.ChangeType = view.CTUpdated + } + default: + { + patchData.ChangeType = view.CTUnchanged + } + } + + c.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchFilesUpdatedPatch{ + Type: websocket.BranchFilesUpdatedType, + UserId: ctx.GetUserId(), + Operation: "patch", + FileId: fileId, + Data: patchData, + }) + err = c.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName) + if err != nil { + return err + } + return nil +} + +func (c draftContentServiceImpl) GetAllZippedContentFromDraftOrGit(ctx context.SecurityContext, projectId string, branchName string) ([]byte, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) 
// TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetAllZippedContentFromDraftOrGit(%s,%s)", projectId, branchName)) + + //this call will always create draft if it doesn't already exist + config, err := c.branchService.GetBranchDetailsEP(goCtx, projectId, branchName, true) + if err != nil { + return nil, err + } + + zipBuf := bytes.Buffer{} + zw := zip.NewWriter(&zipBuf) + + wg := sync.WaitGroup{} + errMap := sync.Map{} + resultFiles := make([][]byte, len(config.Files)) + + for iter, file := range config.Files { + i := iter + currentFile := file + wg.Add(1) + utils.SafeAsync(func() { + defer wg.Done() + + var contentEnt *entity.ContentDraftEntity + + contentEnt, err = c.draftRepository.GetContentWithData(projectId, branchName, currentFile.FileId) + if err != nil { + errMap.Store(currentFile.FileId, err) + return + } + + if contentEnt != nil && contentEnt.Data != nil { + resultFiles[i] = contentEnt.Data + } else if contentEnt != nil { + contentData, err := c.updateUnsavedContentDataFromGit(ctx, projectId, branchName, *entity.MakeContentView(contentEnt)) + if err != nil { + errMap.Store(currentFile.FileId, err) + return + } + resultFiles[i] = contentData.Data + } else if currentFile.IsFolder { + resultFiles[i] = nil + } else { + //this should not be possible + errMap.Store(currentFile.FileId, "file not found in draft; contentEnt==nil") + return + } + }) + } + + wg.Wait() + + var errStr string + errMap.Range(func(key, value interface{}) bool { + errStr += fmt.Sprintf("file: %v, err: %v. ", key, value) + return true + }) + if errStr != "" { + log.Warnf("Got errors during GetAllZippedContentFromDraftOrGit: %s", errStr) // TODO: or should be err returned? 
// this method should only be used for old draft files that did not store git content on draft creation
// todo delete this method when all drafts are updated?
//
// updateUnsavedContentDataFromGit fetches the file's data from git and writes it
// back into the draft entry, returning the fetched content.
//
// Legacy quirk: for such drafts the BlobId field actually holds a commit id, so
// it is passed as the git ref; for moved files the content is looked up under
// MovedFrom, otherwise under FileId. After a successful fetch the draft row is
// updated with the real data and a proper blob id, replacing the commit id.
func (d draftContentServiceImpl) updateUnsavedContentDataFromGit(ctx context.SecurityContext, projectId string, branchName string, content view.Content) (*view.ContentData, error) {
	if content.BlobId == GitBlobIdForEmptyFile {
		// Known empty-file blob — no git round trip needed.
		return &view.ContentData{
			FileId:   content.FileId,
			Data:     []byte{},
			DataType: "text/plain",
			BlobId:   content.BlobId,
		}, nil
	}
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("updateUnsavedContentDataFromGit(%s,%s,%+v)", projectId, branchName, content))

	project, err := d.projectService.GetProject(ctx, projectId)
	if err != nil {
		return nil, err
	}

	gitClient, err := d.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId())
	if err != nil {
		return nil, fmt.Errorf("failed to get git client: %v", err)
	}
	var contentData *view.ContentData
	//in this case we have commitId in blobId field and no data for file
	if content.MovedFrom != "" {
		contentData, err = getContentDataFromGit(goCtx, gitClient, project.Integration.RepositoryId, content.BlobId, content.MovedFrom)
	} else {
		contentData, err = getContentDataFromGit(goCtx, gitClient, project.Integration.RepositoryId, content.BlobId, content.FileId)
	}
	if err != nil {
		return nil, err
	}
	//update content data with new blobId replacing commitId value in this field
	err = d.draftRepository.UpdateContentData(projectId, branchName, content.FileId, contentData.Data, getMediaType(contentData.Data), string(content.Status), contentData.BlobId)
	if err != nil {
		return nil, err
	}
	return contentData, nil
}
package service

import (
	goctx "context"
	"fmt"
	"net/http"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket"
)

// DraftRefService manages references from a branch draft to published package
// versions (add / replace / remove).
type DraftRefService interface {
	UpdateRefs(ctx context.SecurityContext, projectId string, branchName string, refPatch view.RefPatch) error
}

// NewRefService constructs the default DraftRefService implementation over the
// given repositories and collaborating services.
func NewRefService(draftRepository repository.DraftRepository,
	projectService ProjectService,
	branchService BranchService,
	publishedRepo repository.PublishedRepository,
	websocketService WsBranchService) DraftRefService {
	return &draftRefServiceImpl{
		draftRepository:  draftRepository,
		projectService:   projectService,
		branchService:    branchService,
		publishedRepo:    publishedRepo,
		websocketService: websocketService,
	}
}

type draftRefServiceImpl struct {
	draftRepository  repository.DraftRepository
	projectService   ProjectService
	branchService    BranchService
	publishedRepo    repository.PublishedRepository
	websocketService WsBranchService
}

// UpdateRefs applies a single reference patch to the branch draft, creating the
// draft from git first if it does not exist. The patch status selects the
// operation: StatusModified replaces a ref, StatusDeleted removes one,
// StatusAdded adds one; anything else is rejected with 400. On success the
// draft's overall change type is recalculated.
func (d draftRefServiceImpl) UpdateRefs(ctx context.SecurityContext, projectId string, branchName string, refPatch view.RefPatch) error {
	goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request
	goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("UpdateRefs(%s,%s)", projectId, branchName))

	draftExists, err := d.branchService.DraftExists(projectId, branchName)
	if err != nil {
		return err
	}
	if !draftExists {
		err = d.branchService.CreateDraftFromGit(goCtx, projectId, branchName)
		if err != nil {
			return err
		}
	}
	switch refPatch.Status {
	case view.StatusModified:
		{
			err = d.replaceRef(ctx, projectId, branchName, refPatch)
		}
	case view.StatusDeleted:
		{
			err = d.removeRef(ctx, projectId, branchName, refPatch)
		}
	case view.StatusAdded:
		{
			err = d.addRef(ctx, projectId, branchName, refPatch)
		}
	default:
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.UnsupportedStatus,
			Message: exception.UnsupportedStatusMsg,
			Params:  map[string]interface{}{"status": refPatch.Status},
		}
	}
	if err != nil {
		return err
	}
	err = d.branchService.RecalculateDraftConfigChangeType(goCtx, projectId, branchName)
	if err != nil {
		return err
	}
	return nil
}
exception.RefAlreadyExists, + Message: exception.RefAlreadyExistsMsg, + Params: map[string]interface{}{"ref": refPatch.Data.RefId, "projectId": projectId, "version": refPatch.Data.Version, "branch": branchName}, + } + } + packageEnt, err := d.publishedRepo.GetPackage(refPatch.Data.RefId) + if err != nil { + return err + } + if packageEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": refPatch.Data.RefId}, + } + } + version, err := d.publishedRepo.GetVersion(refPatch.Data.RefId, refPatch.Data.Version) + if err != nil { + return err + } + if version == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": refPatch.Data.RefId, "version": refPatch.Data.Version}, + } + } + newRefView := view.Ref{ + RefPackageId: refPatch.Data.RefId, + RefPackageVersion: refPatch.Data.Version, + RefPackageName: packageEnt.Name, + VersionStatus: version.Status, + Kind: packageEnt.Kind, + } + wsPatchData := &websocket.BranchRefsUpdatedPatchData{} + status := string(view.StatusModified) + if ref.Status == string(view.StatusDeleted) || ref.Status == string(view.StatusAdded) { + status = ref.Status + } + if ref.RefPackageId != refPatch.Data.RefId { + wsPatchData.RefId = refPatch.Data.RefId + } + if ref.RefVersion != refPatch.Data.Version { + wsPatchData.Version = refPatch.Data.Version + } + if ref.Status != status { + wsPatchData.Status = view.ParseFileStatus(status) + } + newRef = entity.MakeRefEntity(&newRefView, projectId, branchName, status) + err = d.draftRepository.ReplaceRef(projectId, branchName, refPatch.RefId, refPatch.Version, newRef) + if err != nil { + return err + } + d.websocketService.NotifyProjectBranchUsers(projectId, branchName, + 
websocket.BranchRefsUpdatedPatch{ + Type: websocket.BranchRefsUpdatedType, + UserId: ctx.GetUserId(), + Operation: "patch", + RefId: ref.RefPackageId, + Version: ref.RefVersion, + Data: wsPatchData, + }) + return nil +} + +func (d draftRefServiceImpl) removeRef(ctx context.SecurityContext, projectId string, branchName string, refPatch view.RefPatch) error { + ref, err := d.draftRepository.GetRef(projectId, branchName, refPatch.RefId, refPatch.Version) + if err != nil { + return err + } + if ref == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.RefNotFound, + Message: exception.RefNotFoundMsg, + Params: map[string]interface{}{"ref": refPatch.RefId, "projectId": projectId, "version": refPatch.Version, "branch": branchName}, + } + } + if ref.Status == string(view.StatusAdded) { + err = d.draftRepository.DeleteRef(projectId, branchName, ref.RefPackageId, ref.RefVersion) + if err != nil { + return err + } + d.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchRefsUpdatedPatch{ + Type: websocket.BranchRefsUpdatedType, + UserId: ctx.GetUserId(), + Operation: "remove", + RefId: refPatch.RefId, + Version: refPatch.Version, + }) + return nil + } + if ref.Status == string(view.StatusDeleted) { + return nil + } + ref.Status = string(view.StatusDeleted) + err = d.draftRepository.UpdateRef(ref) + if err != nil { + return err + } + d.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchRefsUpdatedPatch{ + Type: websocket.BranchRefsUpdatedType, + UserId: ctx.GetUserId(), + Operation: "patch", + RefId: refPatch.RefId, + Version: refPatch.Version, + Data: &websocket.BranchRefsUpdatedPatchData{Status: view.StatusDeleted}, + }) + return nil +} + +func (d draftRefServiceImpl) addRef(ctx context.SecurityContext, projectId string, branchName string, refPatch view.RefPatch) error { + ref, err := d.draftRepository.GetRef(projectId, branchName, refPatch.RefId, refPatch.Version) + if err != 
nil { + return err + } + if ref != nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.RefAlreadyExists, + Message: exception.RefAlreadyExistsMsg, + Params: map[string]interface{}{"ref": refPatch.RefId, "projectId": projectId, "version": refPatch.Version, "branch": branchName}, + } + } + packageEnt, err := d.publishedRepo.GetPackage(refPatch.RefId) + if err != nil { + return err + } + if packageEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": refPatch.RefId}, + } + } + version, err := d.publishedRepo.GetVersion(refPatch.RefId, refPatch.Version) + if err != nil { + return err + } + if version == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": refPatch.RefId, "version": refPatch.Version}, + } + } + newRefView := view.Ref{ + RefPackageId: refPatch.RefId, + RefPackageVersion: refPatch.Version, + RefPackageName: packageEnt.Name, + VersionStatus: version.Status, + Kind: packageEnt.Kind, + } + newRef := entity.MakeRefEntity(&newRefView, projectId, branchName, string(view.StatusAdded)) + err = d.draftRepository.CreateRef(newRef) + if err != nil { + return err + } + d.websocketService.NotifyProjectBranchUsers(projectId, branchName, + websocket.BranchRefsUpdatedPatch{ + Type: websocket.BranchRefsUpdatedType, + UserId: ctx.GetUserId(), + Operation: "add", + Data: &websocket.BranchRefsUpdatedPatchData{ + RefId: newRefView.RefPackageId, + Version: newRefView.RefPackageVersion, + Name: newRefView.RefPackageName, + VersionStatus: newRefView.VersionStatus, + Status: view.StatusAdded, + }, + }) + return nil +} diff --git a/qubership-apihub-service/service/ExcelService.go 
b/qubership-apihub-service/service/ExcelService.go new file mode 100644 index 0000000..2ac0caa --- /dev/null +++ b/qubership-apihub-service/service/ExcelService.go @@ -0,0 +1,1706 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "strconv" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" + "github.com/xuri/excelize/v2" +) + +const ExcelTemplatePath = "static/templates/resources/ExcelExportTemplate.xlsx" + +type ExcelService interface { + ExportDeprecatedOperations(packageId, version, apiType string, req view.ExportOperationRequestView) (*excelize.File, string, error) + ExportApiChanges(packageId, version, apiType string, severities []string, req view.ExportApiChangesRequestView) (*excelize.File, string, error) + ExportOperations(packageId, version, apiType string, req view.ExportOperationRequestView) (*excelize.File, string, error) + ExportBusinessMetrics(businessMetrics []view.BusinessMetric) (*excelize.File, string, error) +} + +func NewExcelService(publishedRepo repository.PublishedRepository, versionService VersionService, operationService OperationService, packageService PackageService) ExcelService { + return &excelServiceImpl{publishedRepo: publishedRepo, versionService: 
versionService, operationService: operationService, packageService: packageService} +} + +type excelServiceImpl struct { + publishedRepo repository.PublishedRepository + versionService VersionService + operationService OperationService + packageService PackageService +} + +func (e excelServiceImpl) ExportApiChanges(packageId, version, apiType string, severities []string, req view.ExportApiChangesRequestView) (*excelize.File, string, error) { + versionChangesSearchReq := view.VersionChangesReq{ + PreviousVersion: req.PreviousVersion, + PreviousVersionPackageId: req.PreviousVersionPackageId, + ApiKind: req.ApiKind, + EmptyTag: req.EmptyTag, + RefPackageId: req.RefPackageId, + Tags: req.Tags, + TextFilter: req.TextFilter, + Group: req.Group, + EmptyGroup: req.EmptyGroup, + ApiAudience: req.ApiAudience, + } + changelog, err := e.versionService.GetVersionChanges(packageId, version, apiType, severities, versionChangesSearchReq) + if err != nil { + return nil, "", err + } + if changelog == nil || len(changelog.Operations) == 0 { + return nil, "", nil + } + versionName, err := e.getVersionNameForAttachmentName(packageId, version) + if err != nil { + return nil, "", err + } + versionStatus, err := e.versionService.GetVersionStatus(packageId, version) + if err != nil { + return nil, "", err + } + packageName, err := e.packageService.GetPackageName(packageId) + if err != nil { + return nil, "", err + } + file, err := buildApiChangesWorkbook(changelog, packageName, versionName, versionStatus) + return file, versionName, err +} + +type OperationsReport struct { + workbook *excelize.File + firstSheetIndex int + startColumn string + endColumn string + columnDefaultWidth float64 +} + +func (e excelServiceImpl) ExportOperations(packageId, version, apiType string, req view.ExportOperationRequestView) (*excelize.File, string, error) { + restOperationListReq := view.OperationListReq{ + Kind: req.Kind, + EmptyTag: req.EmptyTag, + Tag: req.Tag, + TextFilter: req.TextFilter, + ApiType: 
apiType, + RefPackageId: req.RefPackageId, + Group: req.Group, + EmptyGroup: req.EmptyGroup, + ApiAudience: req.ApiAudience, + } + operations, err := e.operationService.GetOperations(packageId, version, false, restOperationListReq) + if err != nil { + return nil, "", err + } + if operations == nil || len(operations.Operations) == 0 { + return nil, "", nil + } + versionName, err := e.getVersionNameForAttachmentName(packageId, version) + if err != nil { + return nil, "", err + } + versionStatus, err := e.versionService.GetVersionStatus(packageId, version) + if err != nil { + return nil, "", err + } + packageName, err := e.packageService.GetPackageName(packageId) + if err != nil { + return nil, "", err + } + file, err := buildOperationsWorkbook(operations, packageName, versionName, versionStatus) + return file, versionName, err +} + +type DeprecatedOperationsReport struct { + workbook *excelize.File + firstSheetIndex int + startColumn string + endColumn string + columnDefaultWidth float64 +} + +func (e excelServiceImpl) ExportDeprecatedOperations(packageId, version, apiType string, req view.ExportOperationRequestView) (*excelize.File, string, error) { + deprecatedOperationListReq := view.DeprecatedOperationListReq{ + Kind: req.Kind, + Tags: req.Tags, + TextFilter: req.TextFilter, + ApiType: apiType, + IncludeDeprecatedItems: true, + RefPackageId: req.RefPackageId, + EmptyTag: req.EmptyTag, + EmptyGroup: req.EmptyGroup, + Group: req.Group, + ApiAudience: req.ApiAudience, + } + deprecatedOperations, err := e.operationService.GetDeprecatedOperations(packageId, version, deprecatedOperationListReq) + if err != nil { + return nil, "", err + } + if deprecatedOperations == nil || len(deprecatedOperations.Operations) == 0 { + return nil, "", nil + } + + versionName, err := e.getVersionNameForAttachmentName(packageId, version) + if err != nil { + return nil, "", err + } + + versionStatus, err := e.versionService.GetVersionStatus(packageId, version) + if err != nil { + return 
nil, "", err + } + packageName, err := e.packageService.GetPackageName(packageId) + if err != nil { + return nil, "", err + } + file, err := buildDeprecatedOperationsWorkbook(deprecatedOperations, packageName, versionName, versionStatus) + return file, versionName, err +} + +func buildDeprecatedOperationsWorkbook(deprecatedOperations *view.Operations, packageName, versionName, versionStatus string) (*excelize.File, error) { + var err error + deprecatedOperationsReport, err := excelize.OpenFile(ExcelTemplatePath) + defer func() { + if err := deprecatedOperationsReport.Close(); err != nil { + log.Errorf("Failed to close excel template file: %v", err.Error()) + } + }() + if err != nil { + log.Errorf("Failed to open excel template file: %v", err.Error()) + return nil, err + } + + report := DeprecatedOperationsReport{ + workbook: deprecatedOperationsReport, + startColumn: "A", + endColumn: "L", + columnDefaultWidth: 35, + } + + buildCoverPage(report.workbook, packageName, "Deprecated API Operations", versionName, versionStatus) + + evenCellStyle := getEvenCellStyle(report.workbook) + oddCellStyle := getOddCellStyle(report.workbook) + restOperations := make(map[string][]view.DeprecatedRestOperationView) + graphQLOperations := make(map[string][]view.DeprecateGraphQLOperationView) + protobufOperations := make(map[string][]view.DeprecateProtobufOperationView) + + for _, operation := range deprecatedOperations.Operations { + if restOperation, ok := operation.(view.DeprecatedRestOperationView); ok { + restOperations[restOperation.PackageRef] = append(restOperations[restOperation.PackageRef], restOperation) + continue + } + if graphQLOperation, ok := operation.(view.DeprecateGraphQLOperationView); ok { + graphQLOperations[graphQLOperation.PackageRef] = append(graphQLOperations[graphQLOperation.PackageRef], graphQLOperation) + } + if protobufOperation, ok := operation.(view.DeprecateProtobufOperationView); ok { + protobufOperations[protobufOperation.PackageRef] = 
append(protobufOperations[protobufOperation.PackageRef], protobufOperation) + } + } + var cellsValues map[string]interface{} + rowIndex := 2 + restSheetCreated := false + for packageRef, operationsView := range restOperations { + versionName := deprecatedOperations.Packages[packageRef].RefPackageVersion + if !deprecatedOperations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(deprecatedOperations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !restSheetCreated { + err := report.createRestSheet() + if err != nil { + return nil, err + } + restSheetCreated = true + } + for _, deprecatedItem := range operationView.DeprecatedItems { + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = deprecatedOperations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = operationView.Path + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.Join(operationView.Tags, ",") + cellsValues[fmt.Sprintf("I%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + if len(deprecatedItem.PreviousReleaseVersions) > 0 { + cellsValues[fmt.Sprintf("J%d", rowIndex)] = deprecatedItem.PreviousReleaseVersions[0] + } + cellsValues[fmt.Sprintf("K%d", rowIndex)] = deprecatedItem.Description + if deprecatedItem.DeprecatedInfo != "" { + cellsValues[fmt.Sprintf("L%d", rowIndex)] = deprecatedItem.DeprecatedInfo + } + err := setCellsValues(report.workbook, view.RestAPISheetName, cellsValues) + if err 
!= nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("L%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("L%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + } + + rowIndex = 2 + graphQLSheetCreated := false + for packageRef, operationsView := range graphQLOperations { + versionName := deprecatedOperations.Packages[packageRef].RefPackageVersion + if !deprecatedOperations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(deprecatedOperations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !graphQLSheetCreated { + err := report.createGraphQLSheet() + if err != nil { + return nil, err + } + graphQLSheetCreated = true + } + for _, deprecatedItem := range operationView.DeprecatedItems { + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = deprecatedOperations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = operationView.Type + cellsValues[fmt.Sprintf("G%d", rowIndex)] = strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.Join(operationView.Tags, ",") + cellsValues[fmt.Sprintf("I%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + if len(deprecatedItem.PreviousReleaseVersions) > 0 { + cellsValues[fmt.Sprintf("J%d", rowIndex)] = 
deprecatedItem.PreviousReleaseVersions[0] + } + cellsValues[fmt.Sprintf("K%d", rowIndex)] = deprecatedItem.Description + if deprecatedItem.DeprecatedInfo != "" { + cellsValues[fmt.Sprintf("L%d", rowIndex)] = deprecatedItem.DeprecatedInfo + } + err := setCellsValues(report.workbook, view.GraphQLSheetName, cellsValues) + if err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("L%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("L%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + } + + rowIndex = 2 + protobufSheetCreated := false + for packageRef, operationsView := range protobufOperations { + versionName := deprecatedOperations.Packages[packageRef].RefPackageVersion + if !deprecatedOperations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(deprecatedOperations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !protobufSheetCreated { + err := report.createProtobufSheet() + if err != nil { + return nil, err + } + protobufSheetCreated = true + } + for _, deprecatedItem := range operationView.DeprecatedItems { + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = deprecatedOperations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = deprecatedOperations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = operationView.Type + cellsValues[fmt.Sprintf("G%d", rowIndex)] = 
strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + if len(deprecatedItem.PreviousReleaseVersions) > 0 { + cellsValues[fmt.Sprintf("I%d", rowIndex)] = deprecatedItem.PreviousReleaseVersions[0] + } + cellsValues[fmt.Sprintf("J%d", rowIndex)] = deprecatedItem.Description + if deprecatedItem.DeprecatedInfo != "" { + cellsValues[fmt.Sprintf("K%d", rowIndex)] = deprecatedItem.DeprecatedInfo + } + err := setCellsValues(report.workbook, view.ProtobufSheetName, cellsValues) + if err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("K%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("K%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + } + err = report.setupSettings() + if err != nil { + return nil, err + } + return report.workbook, nil +} +func buildOperationsWorkbook(operations *view.Operations, packageName, versionName, versionStatus string) (*excelize.File, error) { + var err error + + apiChangesReport, err := excelize.OpenFile(ExcelTemplatePath) + defer func() { + if err := apiChangesReport.Close(); err != nil { + log.Errorf("Failed to close excel template file: %v", err.Error()) + } + }() + if err != nil { + log.Errorf("Failed to open excel template file: %v", err.Error()) + return nil, err + } + report := OperationsReport{ + workbook: apiChangesReport, + startColumn: "A", + endColumn: "J", + columnDefaultWidth: 35, + } + + buildCoverPage(report.workbook, packageName, "API Operations", versionName, versionStatus) + + evenCellStyle := getEvenCellStyle(report.workbook) + oddCellStyle := getOddCellStyle(report.workbook) + + restOperations := make(map[string][]view.RestOperationView) + graphQLOperations := make(map[string][]view.GraphQLOperationView) + 
protobufOperations := make(map[string][]view.ProtobufOperationView) + + for _, operation := range operations.Operations { + if restOperation, ok := operation.(view.RestOperationView); ok { + restOperations[restOperation.PackageRef] = append(restOperations[restOperation.PackageRef], restOperation) + continue + } + if graphQLOperation, ok := operation.(view.GraphQLOperationView); ok { + graphQLOperations[graphQLOperation.PackageRef] = append(graphQLOperations[graphQLOperation.PackageRef], graphQLOperation) + } + if protobufOperation, ok := operation.(view.ProtobufOperationView); ok { + protobufOperations[protobufOperation.PackageRef] = append(protobufOperations[protobufOperation.PackageRef], protobufOperation) + } + } + var cellsValues map[string]interface{} + rowIndex := 2 + restSheetCreated := false + for packageRef, operationsView := range restOperations { + versionName := operations.Packages[packageRef].RefPackageVersion + if !operations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(operations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !restSheetCreated { + err := report.createRestSheet() + if err != nil { + return nil, err + } + restSheetCreated = true + } + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = operations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = operations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = operations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = operationView.Path + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.Join(operationView.Tags, " 
") + cellsValues[fmt.Sprintf("I%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + cellsValues[fmt.Sprintf("J%d", rowIndex)] = strings.ToLower(strconv.FormatBool(operationView.Deprecated)) + err := setCellsValues(report.workbook, view.RestAPISheetName, cellsValues) + if err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("J%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("J%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + + rowIndex = 2 + graphQLSheetCreated := false + for packageRef, operationsView := range graphQLOperations { + versionName := operations.Packages[packageRef].RefPackageVersion + if !operations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(operations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !graphQLSheetCreated { + err := report.createGraphQLSheet() + if err != nil { + return nil, err + } + graphQLSheetCreated = true + } + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = operations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = operations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = operations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = operationView.Type + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + cellsValues[fmt.Sprintf("I%d", rowIndex)] = 
strings.ToLower(strconv.FormatBool(operationView.Deprecated)) + err := setCellsValues(report.workbook, view.GraphQLSheetName, cellsValues) + if err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("I%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("I%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + + rowIndex = 2 + protobufSheetCreated := false + for packageRef, operationsView := range protobufOperations { + versionName := operations.Packages[packageRef].RefPackageVersion + if !operations.Packages[packageRef].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(operations.Packages[packageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, operationView := range operationsView { + if !protobufSheetCreated { + err := report.createProtobufSheet() + if err != nil { + return nil, err + } + protobufSheetCreated = true + } + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = operations.Packages[packageRef].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = operations.Packages[packageRef].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = operations.Packages[packageRef].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = operationView.Title + cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.ToUpper(operationView.Method) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = operationView.Type + cellsValues[fmt.Sprintf("H%d", rowIndex)] = strings.ToUpper(operationView.ApiKind) + cellsValues[fmt.Sprintf("I%d", rowIndex)] = strings.ToLower(strconv.FormatBool(operationView.Deprecated)) + err := setCellsValues(report.workbook, view.ProtobufSheetName, cellsValues) + if 
err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("J%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("J%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + err = report.setupSettings() + if err != nil { + return nil, err + } + return report.workbook, nil +} + +type ApiChangesReport struct { + workbook *excelize.File + firstSheetIndex int + startColumn string + endColumn string + columnDefaultWidth float64 +} + +func buildApiChangesWorkbook(versionChanges *view.VersionChangesView, packageName, versionName, versionStatus string) (*excelize.File, error) { + var err error + apiChangesReport, err := excelize.OpenFile(ExcelTemplatePath) + defer func() { + if err := apiChangesReport.Close(); err != nil { + log.Errorf("Failed to close excel template file: %v", err.Error()) + } + }() + if err != nil { + log.Errorf("Failed to open excel template file: %v", err.Error()) + return nil, err + } + report := ApiChangesReport{ + workbook: apiChangesReport, + startColumn: "A", + endColumn: "M", + columnDefaultWidth: 35, + } + + reportName := fmt.Sprintf("API changes between versions %s and %s", versionChanges.PreviousVersion, versionName) + + buildCoverPage(report.workbook, packageName, reportName, versionName, versionStatus) + + var cellsValues map[string]interface{} + report.firstSheetIndex, err = report.workbook.NewSheet(view.SummarySheetName) + if err != nil { + return nil, err + } + evenCellStyle := getEvenCellStyle(report.workbook) + oddCellStyle := getOddCellStyle(report.workbook) + summaryCellStyle := getSummaryCellStyle(report.workbook) + summaryFirstHeaderStyle := getSummaryFirstHeaderStyle(report.workbook) + summaryHeaderStyle := getSummaryHeaderStyle(report.workbook) + restApiMap := 
make(map[string][]view.RestOperationComparisonChangesView) + graphQLApiMap := make(map[string][]view.GraphQLOperationComparisonChangesView) + protobufApiMap := make(map[string][]view.ProtobufOperationComparisonChangesView) + err = report.setupSettings() + if err != nil { + return nil, err + } + + for _, operation := range versionChanges.Operations { + if restOperation, ok := operation.(view.RestOperationComparisonChangesView); ok { + if restOperation.PackageRef == "" { + restApiMap[restOperation.PreviousVersionPackageRef] = append(restApiMap[restOperation.PreviousVersionPackageRef], restOperation) + continue + } + restApiMap[restOperation.PackageRef] = append(restApiMap[restOperation.PackageRef], restOperation) + continue + } + if graphQLOperation, ok := operation.(view.GraphQLOperationComparisonChangesView); ok { + if graphQLOperation.PackageRef == "" { + graphQLApiMap[graphQLOperation.PreviousVersionPackageRef] = append(graphQLApiMap[graphQLOperation.PreviousVersionPackageRef], graphQLOperation) + continue + } + graphQLApiMap[graphQLOperation.PackageRef] = append(graphQLApiMap[graphQLOperation.PackageRef], graphQLOperation) + } + if protobufOperation, ok := operation.(view.ProtobufOperationComparisonChangesView); ok { + if protobufOperation.PackageRef == "" { + protobufApiMap[protobufOperation.PreviousVersionPackageRef] = append(protobufApiMap[protobufOperation.PreviousVersionPackageRef], protobufOperation) + continue + } + protobufApiMap[protobufOperation.PackageRef] = append(protobufApiMap[protobufOperation.PackageRef], protobufOperation) + } + } + + restApiAllChangesSummaryMap := make(map[string]view.ChangeSummary) + graphQLApiAllChangesSummaryMap := make(map[string]view.ChangeSummary) + protobufApiAllChangesSummaryMap := make(map[string]view.ChangeSummary) + + for key, value := range restApiMap { + summary := restApiAllChangesSummaryMap[key] + for _, changelogView := range value { + if changelogView.ChangeSummary.Deprecated > 0 { + summary.Deprecated += 1 + } 
+ if changelogView.ChangeSummary.NonBreaking > 0 { + summary.NonBreaking += 1 + } + if changelogView.ChangeSummary.Breaking > 0 { + summary.Breaking += 1 + } + if changelogView.ChangeSummary.SemiBreaking > 0 { + summary.SemiBreaking += 1 + } + if changelogView.ChangeSummary.Annotation > 0 { + summary.Annotation += 1 + } + if changelogView.ChangeSummary.Unclassified > 0 { + summary.Unclassified += 1 + } + } + restApiAllChangesSummaryMap[key] = summary + } + + for key, value := range graphQLApiMap { + summary := graphQLApiAllChangesSummaryMap[key] + for _, changelogView := range value { + if changelogView.ChangeSummary.Deprecated > 0 { + summary.Deprecated += 1 + } + if changelogView.ChangeSummary.NonBreaking > 0 { + summary.NonBreaking += 1 + } + if changelogView.ChangeSummary.Breaking > 0 { + summary.Breaking += 1 + } + if changelogView.ChangeSummary.SemiBreaking > 0 { + summary.SemiBreaking += 1 + } + if changelogView.ChangeSummary.Annotation > 0 { + summary.Annotation += 1 + } + if changelogView.ChangeSummary.Unclassified > 0 { + summary.Unclassified += 1 + } + } + graphQLApiAllChangesSummaryMap[key] = summary + } + for key, value := range protobufApiMap { + summary := protobufApiAllChangesSummaryMap[key] + for _, changelogView := range value { + if changelogView.ChangeSummary.Deprecated > 0 { + summary.Deprecated += 1 + } + if changelogView.ChangeSummary.NonBreaking > 0 { + summary.NonBreaking += 1 + } + if changelogView.ChangeSummary.Breaking > 0 { + summary.Breaking += 1 + } + if changelogView.ChangeSummary.SemiBreaking > 0 { + summary.SemiBreaking += 1 + } + if changelogView.ChangeSummary.Annotation > 0 { + summary.Annotation += 1 + } + if changelogView.ChangeSummary.Unclassified > 0 { + summary.Unclassified += 1 + } + } + protobufApiAllChangesSummaryMap[key] = summary + } + + cellsValues = make(map[string]interface{}) + cellsValues["A1"] = view.SummarySheetName + cellsValues["A2"] = view.PackageIDColumnName + cellsValues["A3"] = view.PackageNameColumnName + 
cellsValues["A4"] = view.ServiceNameColumnName + cellsValues["A5"] = view.VersionColumnName + cellsValues["A6"] = view.PreviousVersionColumnName + cellsValues["A7"] = view.APITypeColumnName + cellsValues["A8"] = "Number of operations with breaking changes" + cellsValues["A9"] = "Number of operations with risky changes" + cellsValues["A10"] = "Number of operations with non-breaking changes" + cellsValues["A11"] = "Number of operations with deprecated changes" + cellsValues["A12"] = "Number of operations with annotation changes" + cellsValues["A13"] = "Number of operations with unclassified changes" + err = setCellsValues(report.workbook, view.SummarySheetName, cellsValues) + if err != nil { + return nil, err + } + err = report.workbook.SetCellStyle(view.SummarySheetName, "A1", "A1", summaryFirstHeaderStyle) + if err != nil { + return nil, err + } + err = report.workbook.SetCellStyle(view.SummarySheetName, "A2", "A13", summaryHeaderStyle) + if err != nil { + return nil, err + } + + for key, value := range restApiAllChangesSummaryMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + previousVersionName := versionChanges.Packages[restApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[restApiMap[key][0].PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[restApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + cellsValues = make(map[string]interface{}) + cellsValues["B2"] = versionChanges.Packages[key].RefPackageId + cellsValues["B3"] = versionChanges.Packages[key].RefPackageName + cellsValues["B4"] = versionChanges.Packages[key].ServiceName + cellsValues["B5"] = versionName 
+ cellsValues["B6"] = previousVersionName + cellsValues["B7"] = "rest" + cellsValues["B8"] = value.Breaking + cellsValues["B9"] = value.SemiBreaking + cellsValues["B10"] = value.NonBreaking + cellsValues["B11"] = value.Deprecated + cellsValues["B12"] = value.Annotation + cellsValues["B13"] = value.Unclassified + err := setCellsValues(report.workbook, view.SummarySheetName, cellsValues) + if err != nil { + return nil, err + } + err = report.workbook.SetCellStyle(view.SummarySheetName, "B1", "B13", summaryCellStyle) + if err != nil { + return nil, err + } + } + for key, value := range graphQLApiAllChangesSummaryMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + previousVersionName := versionChanges.Packages[graphQLApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[graphQLApiMap[key][0].PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[graphQLApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + cellsValues = make(map[string]interface{}) + cellsValues["C2"] = versionChanges.Packages[key].RefPackageId + cellsValues["C3"] = versionChanges.Packages[key].RefPackageName + cellsValues["C4"] = versionChanges.Packages[key].ServiceName + cellsValues["C5"] = versionName + cellsValues["C6"] = previousVersionName + cellsValues["C7"] = "graphQL" + cellsValues["C8"] = value.Breaking + cellsValues["C9"] = value.SemiBreaking + cellsValues["C10"] = value.NonBreaking + cellsValues["C11"] = value.Deprecated + cellsValues["C12"] = value.Annotation + cellsValues["C13"] = value.Unclassified + err := setCellsValues(report.workbook, view.SummarySheetName, cellsValues) + if err != nil { + 
return nil, err + } + err = report.workbook.SetCellStyle(view.SummarySheetName, "C1", "C13", summaryCellStyle) + if err != nil { + return nil, err + } + } + for key, value := range protobufApiAllChangesSummaryMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + previousVersionName := versionChanges.Packages[protobufApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[protobufApiMap[key][0].PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[protobufApiMap[key][0].PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + cellsValues = make(map[string]interface{}) + cellsValues["D2"] = versionChanges.Packages[key].RefPackageId + cellsValues["D3"] = versionChanges.Packages[key].RefPackageName + cellsValues["D4"] = versionChanges.Packages[key].ServiceName + cellsValues["D5"] = versionName + cellsValues["D6"] = previousVersionName + cellsValues["D7"] = "protobuf" + cellsValues["D8"] = value.Breaking + cellsValues["D9"] = value.SemiBreaking + cellsValues["D10"] = value.NonBreaking + cellsValues["D11"] = value.Deprecated + cellsValues["D12"] = value.Annotation + cellsValues["D13"] = value.Unclassified + err := setCellsValues(report.workbook, view.SummarySheetName, cellsValues) + if err != nil { + return nil, err + } + err = report.workbook.SetCellStyle(view.SummarySheetName, "D1", "D13", summaryCellStyle) + if err != nil { + return nil, err + } + } + + rowIndex := 2 + restSheetCreated := false + for key, changelogRestOperationView := range restApiMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = 
getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, changelogView := range changelogRestOperationView { + previousVersionName := versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[changelogView.PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, change := range changelogView.Changes { + commonOperationChange := view.GetSingleOperationChangeCommon(change) + if !restSheetCreated { + err := report.createRestSheet() + if err != nil { + return nil, err + } + restSheetCreated = true + } + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = versionChanges.Packages[key].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = versionChanges.Packages[key].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = versionChanges.Packages[key].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = previousVersionName + cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.Replace(changelogView.Title, "Semi-Breaking", "Risky", -1) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = changelogView.Method + cellsValues[fmt.Sprintf("H%d", rowIndex)] = changelogView.Path + cellsValues[fmt.Sprintf("I%d", rowIndex)] = changelogView.Action + cellsValues[fmt.Sprintf("J%d", rowIndex)] = commonOperationChange.Description + cellsValues[fmt.Sprintf("K%d", rowIndex)] = mapServerity(commonOperationChange.Severity) + cellsValues[fmt.Sprintf("L%d", rowIndex)] = versionChanges.Packages[key].Kind + cellsValues[fmt.Sprintf("M%d", rowIndex)] = changelogView.ApiKind + err := setCellsValues(report.workbook, view.RestAPISheetName, cellsValues) + if err != nil { + 
return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + } + + rowIndex = 2 + graphQLSheetCreated := false + for key, changelogGraphQLOperationView := range graphQLApiMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, changelogView := range changelogGraphQLOperationView { + previousVersionName := versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[changelogView.PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, change := range changelogView.Changes { + commonOperationChange := view.GetSingleOperationChangeCommon(change) + if !graphQLSheetCreated { + err := report.createGraphQLSheet() + if err != nil { + return nil, err + } + graphQLSheetCreated = true + } + cellsValues = make(map[string]interface{}) + cellsValues[fmt.Sprintf("A%d", rowIndex)] = versionChanges.Packages[key].RefPackageId + cellsValues[fmt.Sprintf("B%d", rowIndex)] = versionChanges.Packages[key].RefPackageName + cellsValues[fmt.Sprintf("C%d", rowIndex)] = versionChanges.Packages[key].ServiceName + cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName + cellsValues[fmt.Sprintf("E%d", rowIndex)] = previousVersionName + cellsValues[fmt.Sprintf("F%d", rowIndex)] = 
strings.Replace(changelogView.Title, "Semi-Breaking", "Risky", -1) + cellsValues[fmt.Sprintf("G%d", rowIndex)] = changelogView.Method + cellsValues[fmt.Sprintf("H%d", rowIndex)] = changelogView.Type + cellsValues[fmt.Sprintf("I%d", rowIndex)] = changelogView.Action + cellsValues[fmt.Sprintf("J%d", rowIndex)] = commonOperationChange.Description + cellsValues[fmt.Sprintf("K%d", rowIndex)] = mapServerity(commonOperationChange.Severity) + cellsValues[fmt.Sprintf("L%d", rowIndex)] = versionChanges.Packages[key].Kind + cellsValues[fmt.Sprintf("M%d", rowIndex)] = changelogView.ApiKind + err := setCellsValues(report.workbook, view.GraphQLSheetName, cellsValues) + if err != nil { + return nil, err + } + if rowIndex%2 == 0 { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), evenCellStyle) + } else { + err = report.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), oddCellStyle) + } + if err != nil { + return nil, err + } + rowIndex += 1 + } + } + } + + rowIndex = 2 + protobufSheetCreated := false + for key, changelogProtobufOperationView := range protobufApiMap { + versionName := versionChanges.Packages[key].RefPackageVersion + if !versionChanges.Packages[key].NotLatestRevision { + versionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[key].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, changelogView := range changelogProtobufOperationView { + previousVersionName := versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion + if !versionChanges.Packages[changelogView.PreviousVersionPackageRef].NotLatestRevision { + previousVersionName, err = getVersionNameFromVersionWithRevision(versionChanges.Packages[changelogView.PreviousVersionPackageRef].RefPackageVersion) + if err != nil { + return nil, err + } + } + for _, change := range changelogView.Changes { + commonOperationChange := 
view.GetSingleOperationChangeCommon(change)
				// Create the Protobuf sheet lazily, only once the first protobuf change row exists.
				if !protobufSheetCreated {
					err := report.createProtobufSheet()
					if err != nil {
						return nil, err
					}
					protobufSheetCreated = true
				}
				// One spreadsheet row per individual change: package/version context in A-E,
				// operation identity in F-I, change details in J-M.
				cellsValues = make(map[string]interface{})
				cellsValues[fmt.Sprintf("A%d", rowIndex)] = versionChanges.Packages[key].RefPackageId
				cellsValues[fmt.Sprintf("B%d", rowIndex)] = versionChanges.Packages[key].RefPackageName
				cellsValues[fmt.Sprintf("C%d", rowIndex)] = versionChanges.Packages[key].ServiceName
				cellsValues[fmt.Sprintf("D%d", rowIndex)] = versionName
				cellsValues[fmt.Sprintf("E%d", rowIndex)] = previousVersionName
				// The internal "Semi-Breaking" wording is presented to users as "Risky".
				cellsValues[fmt.Sprintf("F%d", rowIndex)] = strings.Replace(changelogView.Title, "Semi-Breaking", "Risky", -1)
				cellsValues[fmt.Sprintf("G%d", rowIndex)] = changelogView.Method
				cellsValues[fmt.Sprintf("H%d", rowIndex)] = changelogView.Type
				cellsValues[fmt.Sprintf("I%d", rowIndex)] = changelogView.Action
				cellsValues[fmt.Sprintf("J%d", rowIndex)] = commonOperationChange.Description
				cellsValues[fmt.Sprintf("K%d", rowIndex)] = mapServerity(commonOperationChange.Severity)
				cellsValues[fmt.Sprintf("L%d", rowIndex)] = versionChanges.Packages[key].Kind
				cellsValues[fmt.Sprintf("M%d", rowIndex)] = changelogView.ApiKind
				err := setCellsValues(report.workbook, view.ProtobufSheetName, cellsValues)
				if err != nil {
					return nil, err
				}
				// Alternate banded row styles for readability.
				if rowIndex%2 == 0 {
					err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), evenCellStyle)
				} else {
					err = report.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("M%d", rowIndex), oddCellStyle)
				}
				if err != nil {
					return nil, err
				}
				rowIndex += 1
			}
		}
	}
	return report.workbook, nil
}

// mapServerity maps an internal severity identifier to its user-facing label:
// "semi-breaking" is reported as "risky"; any other value is returned unchanged.
// NOTE(review): the name contains a typo ("Serverity" instead of "Severity");
// it is kept as-is because the report-building code above calls it by this name.
func mapServerity(severity string) string {
	switch severity {
	case "semi-breaking":
		return "risky"
	default:
		return severity
	}
}

// setupSettings activates the report's first sheet, removes the default "Sheet1"
// that excelize creates with every new workbook, and applies the default column
// width to the Summary sheet.
func (a *ApiChangesReport) setupSettings() error {
	a.workbook.SetActiveSheet(a.firstSheetIndex)
	err := a.workbook.DeleteSheet("Sheet1")
	if err != nil {
		return err
	}
	err = a.workbook.SetColWidth(view.SummarySheetName, a.startColumn, a.endColumn, a.columnDefaultWidth)
	if err != nil {
		return err
	}
	return nil
}

// setupSettings activates the report's first sheet and removes the default
// "Sheet1" created by excelize.
func (o *OperationsReport) setupSettings() error {
	o.workbook.SetActiveSheet(o.firstSheetIndex)
	err := o.workbook.DeleteSheet("Sheet1")
	if err != nil {
		return err
	}
	return nil
}

// setupSettings activates the report's first sheet and removes the default
// "Sheet1" created by excelize.
func (o *DeprecatedOperationsReport) setupSettings() error {
	o.workbook.SetActiveSheet(o.firstSheetIndex)
	err := o.workbook.DeleteSheet("Sheet1")
	if err != nil {
		return err
	}
	return nil
}

// setCellsValues writes every cell-reference -> value pair into the given sheet,
// stopping at the first error. Map iteration order is nondeterministic, which is
// harmless here because each key addresses a distinct cell.
func setCellsValues(report *excelize.File, sheetName string, columnsValue map[string]interface{}) error {
	for key, value := range columnsValue {
		err := report.SetCellValue(sheetName, key, value)
		if err != nil {
			return err
		}
	}
	return nil
}

// createGraphQLSheet creates the GraphQL changes sheet with its styled header
// row (columns A..M) and an auto-filter over the header.
func (a *ApiChangesReport) createGraphQLSheet() error {
	headerRowIndex := 1
	_, err := a.workbook.NewSheet(view.GraphQLSheetName)
	headerStyle := getHeaderStyle(a.workbook)
	if err != nil {
		return err
	}
	err = a.workbook.SetColWidth(view.GraphQLSheetName, a.startColumn, a.endColumn, a.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.PreviousVersionColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.OperationActionColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.ChangeDescriptionColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.ChangeSeverityColumnName
	cellsValues[fmt.Sprintf("L%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("M%d", headerRowIndex)] = view.APIKindColumnName
	err = setCellsValues(a.workbook, view.GraphQLSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = a.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	// Enable Excel filtering over the header row.
	err = a.workbook.AutoFilter(view.GraphQLSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createProtobufSheet creates the Protobuf changes sheet with its styled header
// row (columns A..M) and an auto-filter over the header.
func (a *ApiChangesReport) createProtobufSheet() error {
	headerRowIndex := 1
	headerStyle := getHeaderStyle(a.workbook)
	_, err := a.workbook.NewSheet(view.ProtobufSheetName)
	if err != nil {
		return err
	}
	err = a.workbook.SetColWidth(view.ProtobufSheetName, a.startColumn, a.endColumn, a.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.PreviousVersionColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.OperationActionColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.ChangeDescriptionColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.ChangeSeverityColumnName
	cellsValues[fmt.Sprintf("L%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("M%d", headerRowIndex)] = view.APIKindColumnName
	err = setCellsValues(a.workbook, view.ProtobufSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = a.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	// Enable Excel filtering over the header row.
	err = a.workbook.AutoFilter(view.ProtobufSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createRestSheet creates the REST API changes sheet with its styled header row
// (columns A..M) and an auto-filter over the header.
func (a *ApiChangesReport) createRestSheet() error {
	headerRowIndex := 1
	headerStyle := getHeaderStyle(a.workbook)
	_, err := a.workbook.NewSheet(view.RestAPISheetName)
	if err != nil {
		return err
	}
	err = a.workbook.SetColWidth(view.RestAPISheetName, a.startColumn, a.endColumn, a.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.PreviousVersionColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.OperationPathColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.OperationActionColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.ChangeDescriptionColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.ChangeSeverityColumnName
	cellsValues[fmt.Sprintf("L%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("M%d", headerRowIndex)] = view.APIKindColumnName
	err = setCellsValues(a.workbook, view.RestAPISheetName, cellsValues)
	if err != nil {
		return err
	}
	err = a.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	// Enable Excel filtering over the header row.
	err = a.workbook.AutoFilter(view.RestAPISheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("M%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createRestSheet creates the REST operations sheet (columns A..J) with a styled
// header row and an auto-filter, and records its index as the report's first sheet.
func (o *OperationsReport) createRestSheet() error {
	var err error
	headerRowIndex := 1
	o.firstSheetIndex, err = o.workbook.NewSheet(view.RestAPISheetName)
	headerStyle := getHeaderStyle(o.workbook)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.RestAPISheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationPathColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.TagColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.DeprecatedColumnName
	err = setCellsValues(o.workbook, view.RestAPISheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("J%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.RestAPISheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("J%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createGraphQLSheet creates the GraphQL operations sheet (columns A..I) with a
// styled header row and an auto-filter, and records its index as the report's
// first sheet.
// NOTE(review): column I uses view.Deprecated here while the REST sheet above
// uses view.DeprecatedColumnName — possibly inconsistent header text; confirm
// against the view package's constant definitions.
func (o *OperationsReport) createGraphQLSheet() error {
	var err error
	headerRowIndex := 1
	o.firstSheetIndex, err = o.workbook.NewSheet(view.GraphQLSheetName)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.GraphQLSheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	headerStyle := getHeaderStyle(o.workbook)
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.Deprecated
	err = setCellsValues(o.workbook, view.GraphQLSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("I%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.GraphQLSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("I%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// getHeaderStyle returns a style id for sheet header rows: bold white 10pt Arial
// on a blue fill with light borders. The NewStyle error is deliberately ignored;
// on failure the zero style id is returned.
func getHeaderStyle(file *excelize.File) (style int) {
	headerStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Bold:   true,
			Family: "Arial",
			Size:   10,
			Color:  "FFFFFF",
		},
		Border: []excelize.Border{
			{Type: "left", Color: "E2E5E8", Style: 1},
			{Type: "right", Color: "E2E5E8", Style: 1},
			{Type: "top", Color: "E2E5E8", Style: 1},
			{Type: "bottom", Color: "E2E5E8", Style: 1},
		},
		Fill: excelize.Fill{
			Type:    "pattern",
			Color:   []string{"4E79A0"},
			Pattern: 1,
		},
	})
	return headerStyle
}

// getSummaryFirstHeaderStyle returns the style id for the Summary sheet's title
// cell: bold, light-blue fill, black borders, centered text. NewStyle errors are
// ignored, like in the other style helpers.
func getSummaryFirstHeaderStyle(file *excelize.File) (style int) {
	headerStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Bold:   true,
			Family: "Arial",
			Size:   10,
		},
		Border: []excelize.Border{
			{Type: "left", Color: "000000", Style: 1},
			{Type: "right", Color: "000000", Style: 1},
			{Type: "top", Color: "000000", Style: 1},
			{Type: "bottom", Color: "000000", Style: 1},
		},
		Fill: excelize.Fill{
			Type:    "pattern",
			Color:   []string{"DAE3ED"},
			Pattern: 1,
		},
		Alignment: &excelize.Alignment{
			Horizontal: "center",
		},
	})
	return headerStyle
}

// getSummaryHeaderStyle returns the style id for the Summary sheet's row labels:
// identical to getSummaryFirstHeaderStyle but without the centered alignment.
func getSummaryHeaderStyle(file *excelize.File) (style int) {
	headerStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Bold:   true,
			Family: "Arial",
			Size:   10,
		},
		Border: []excelize.Border{
			{Type: "left", Color: "000000", Style: 1},
			{Type: "right", Color: "000000", Style: 1},
			{Type: "top", Color: "000000", Style: 1},
			{Type: "bottom", Color: "000000", Style: 1},
		},
		Fill: excelize.Fill{
			Type:    "pattern",
			Color:   []string{"DAE3ED"},
			Pattern: 1,
		},
	})
	return headerStyle
}

// buildCoverPage fills the "Cover Page" sheet: the package name and report name
// as large text shapes (B15/B18), and version, status and generation date in
// C23..C25. The sheet itself must already exist in the workbook.
func buildCoverPage(file *excelize.File, packageName, reportName, packageVersion, packageVersionStatus string) error {
	var err error

	err = file.AddShape("Cover Page", "B15",
		&excelize.Shape{
			Type: "rect",
			Paragraph: []excelize.RichTextRun{
				{
					Text: packageName,
					Font: &excelize.Font{
						Family: "Arial",
						Size:   24,
						Color:  "183147",
					},
				},
			},
			Width:  800,
			Height: 50,
		},
	)
	if err != nil {
		return err
	}

	err = file.AddShape("Cover Page", "B18",
		&excelize.Shape{
			Type: "rect",
			Paragraph: []excelize.RichTextRun{
				{
					Text: reportName,
					Font: &excelize.Font{
						Family: "Arial",
						Size:   16,
						Color:  "91ABC4",
					},
				},
			},
			Width:  800,
			Height: 50,
		},
	)
	if err != nil {
		return err
	}

	err = file.SetCellValue("Cover Page", "C23", packageVersion)
	if err != nil {
		return err
	}
	err = file.SetCellValue("Cover Page", "C24", packageVersionStatus)
	if err != nil {
		return err
	}
	currentTime := time.Now()
	err = file.SetCellValue("Cover Page", "C25", currentTime.Format("2006-01-02")) // YYYY-MM-DD
	if err != nil {
		return err
	}
	return nil
}

// getEvenCellStyle returns the style id for even data rows: light borders and a
// pale-grey fill, used for row banding.
// NOTE(review): the fill color "#F5F7F8" carries a '#' prefix while every other
// color in this file is a bare hex string — verify both forms render the same.
func getEvenCellStyle(file *excelize.File) (style int) {
	evenCellStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Family: "Arial",
			Size:   10,
		},
		Border: []excelize.Border{
			{Type: "left", Color: "E2E5E8", Style: 1},
			{Type: "right", Color: "E2E5E8", Style: 1},
			{Type: "top", Color: "E2E5E8", Style: 1},
			{Type: "bottom", Color: "E2E5E8", Style: 1},
		},
		Fill: excelize.Fill{
			Type:    "pattern",
			Color:   []string{"#F5F7F8"},
			Pattern: 1,
		},
	})
	return evenCellStyle
}

// getOddCellStyle returns the style id for odd data rows: light borders, no fill.
func getOddCellStyle(file *excelize.File) (style int) {
	oddCellStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Family: "Arial",
			Size:   10,
		},
		Border: []excelize.Border{
			{Type: "left", Color: "E2E5E8", Style: 1},
			{Type: "right", Color: "E2E5E8", Style: 1},
			{Type: "top", Color: "E2E5E8", Style: 1},
			{Type: "bottom", Color: "E2E5E8", Style: 1},
		},
	})
	return oddCellStyle
}

// getSummaryCellStyle returns the style id for Summary sheet value cells: black
// borders, left-aligned text, no fill.
func getSummaryCellStyle(file *excelize.File) (style int) {
	oddCellStyle, _ := file.NewStyle(&excelize.Style{
		Font: &excelize.Font{
			Family: "Arial",
			Size:   10,
		},
		Border: []excelize.Border{
			{Type: "left", Color: "000000", Style: 1},
			{Type: "right", Color: "000000", Style: 1},
			{Type: "top", Color: "000000", Style: 1},
			{Type: "bottom", Color: "000000", Style: 1},
		},
		Alignment: &excelize.Alignment{
			Horizontal: "left",
		},
	})
	return oddCellStyle
}

// createProtobufSheet creates the Protobuf operations sheet (columns A..I) with
// a styled header row and an auto-filter, and records its index as the report's
// first sheet.
// NOTE(review): column I uses view.Deprecated while the REST sheet uses
// view.DeprecatedColumnName — confirm the intended header constant.
func (o *OperationsReport) createProtobufSheet() error {
	var err error
	headerRowIndex := 1
	o.firstSheetIndex, err = o.workbook.NewSheet(view.ProtobufSheetName)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.ProtobufSheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	headerStyle := getHeaderStyle(o.workbook)
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.Deprecated
	err = setCellsValues(o.workbook, view.ProtobufSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("I%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.ProtobufSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("I%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createRestSheet creates the deprecated REST operations sheet (columns A..L,
// including deprecation metadata in J..L) with a styled header row and an
// auto-filter, and records its index as the report's first sheet.
func (o *DeprecatedOperationsReport) createRestSheet() error {
	var err error
	headerRowIndex := 1
	headerStyle := getHeaderStyle(o.workbook)
	o.firstSheetIndex, err = o.workbook.NewSheet(view.RestAPISheetName)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.RestAPISheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationPathColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.TagColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.DeprecatedSinceColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.DeprecatedDescriptionColumnName
	cellsValues[fmt.Sprintf("L%d", headerRowIndex)] = view.AdditionalInformationColumnName
	err = setCellsValues(o.workbook, view.RestAPISheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.RestAPISheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("L%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.RestAPISheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("L%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createGraphQLSheet creates the deprecated GraphQL operations sheet (columns
// A..L, deprecation metadata in J..L) with a styled header row and an
// auto-filter, and records its index as the report's first sheet.
func (o *DeprecatedOperationsReport) createGraphQLSheet() error {
	var err error
	headerRowIndex := 1
	headerStyle := getHeaderStyle(o.workbook)
	o.firstSheetIndex, err = o.workbook.NewSheet(view.GraphQLSheetName)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.GraphQLSheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.TagColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.DeprecatedSinceColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.DeprecatedDescriptionColumnName
	cellsValues[fmt.Sprintf("L%d", headerRowIndex)] = view.AdditionalInformationColumnName
	err = setCellsValues(o.workbook, view.GraphQLSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.GraphQLSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("L%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.GraphQLSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("L%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// createProtobufSheet creates the deprecated Protobuf operations sheet (columns
// A..K, deprecation metadata in I..K — no Tag column, unlike REST/GraphQL) with
// a styled header row and an auto-filter, and records its index as the report's
// first sheet.
func (o *DeprecatedOperationsReport) createProtobufSheet() error {
	var err error
	headerRowIndex := 1
	headerStyle := getHeaderStyle(o.workbook)
	o.firstSheetIndex, err = o.workbook.NewSheet(view.ProtobufSheetName)
	if err != nil {
		return err
	}
	err = o.workbook.SetColWidth(view.ProtobufSheetName, o.startColumn, o.endColumn, o.columnDefaultWidth)
	if err != nil {
		return err
	}
	cellsValues := make(map[string]interface{})
	cellsValues[fmt.Sprintf("A%d", headerRowIndex)] = view.PackageIDColumnName
	cellsValues[fmt.Sprintf("B%d", headerRowIndex)] = view.PackageNameColumnName
	cellsValues[fmt.Sprintf("C%d", headerRowIndex)] = view.ServiceNameColumnName
	cellsValues[fmt.Sprintf("D%d", headerRowIndex)] = view.VersionColumnName
	cellsValues[fmt.Sprintf("E%d", headerRowIndex)] = view.OperationTitleColumnName
	cellsValues[fmt.Sprintf("F%d", headerRowIndex)] = view.OperationTypeColumnName
	cellsValues[fmt.Sprintf("G%d", headerRowIndex)] = view.OperationMethodColumnName
	cellsValues[fmt.Sprintf("H%d", headerRowIndex)] = view.KindColumnName
	cellsValues[fmt.Sprintf("I%d", headerRowIndex)] = view.DeprecatedSinceColumnName
	cellsValues[fmt.Sprintf("J%d", headerRowIndex)] = view.DeprecatedDescriptionColumnName
	cellsValues[fmt.Sprintf("K%d", headerRowIndex)] = view.AdditionalInformationColumnName
	err = setCellsValues(o.workbook, view.ProtobufSheetName, cellsValues)
	if err != nil {
		return err
	}
	err = o.workbook.SetCellStyle(view.ProtobufSheetName, fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("K%d", headerRowIndex), headerStyle)
	if err != nil {
		return err
	}
	err = o.workbook.AutoFilter(view.ProtobufSheetName, fmt.Sprintf("%s:%s", fmt.Sprintf("A%d", headerRowIndex), fmt.Sprintf("K%d", headerRowIndex)), []excelize.AutoFilterOptions{})
	if err != nil {
		return err
	}
	return nil
}

// getVersionNameForAttachmentName resolves the display version name used in a
// report attachment's filename, based on the latest revision of the version.
// (Definition continues beyond this chunk.)
func (e excelServiceImpl) getVersionNameForAttachmentName(packageId, version string) (string, error) {
	latestRevision, err := e.publishedRepo.GetLatestRevision(packageId, version)
	if err != nil {
		return "", err
	}
	versionName, versionRevision, err :=
SplitVersionRevision(version) + if err != nil { + return "", err + } + if latestRevision == versionRevision { + return versionName, nil + } + return version, nil +} + +func getVersionNameFromVersionWithRevision(version string) (string, error) { + versionName, _, err := SplitVersionRevision(version) + if err != nil { + return "", err + } + return versionName, nil +} + +func (e excelServiceImpl) ExportBusinessMetrics(businessMetrics []view.BusinessMetric) (*excelize.File, string, error) { + var err error + workbook := excelize.NewFile() + report := businessMetricsReport{ + workbook: workbook, + } + err = report.createResultSheet(businessMetrics) + if err != nil { + return nil, "", err + } + err = report.workbook.DeleteSheet("Sheet1") + if err != nil { + return nil, "", fmt.Errorf("failed to delete default Sheet1: %v", err.Error()) + } + filename := fmt.Sprintf("business_metrics_%v.xlsx", time.Now().Format("2006-01-02 15-04-05")) + return report.workbook, filename, nil +} + +type businessMetricsReport struct { + workbook *excelize.File +} + +func (b *businessMetricsReport) createResultSheet(businessMetrics []view.BusinessMetric) error { + sheetName := "Result" + headerStyle := getHeaderStyle(b.workbook) + evenCellStyle := getEvenCellStyle(b.workbook) + oddCellStyle := getOddCellStyle(b.workbook) + _, err := b.workbook.NewSheet(sheetName) + if err != nil { + return fmt.Errorf("failed to create new sheet: %v", err) + } + cells := make(map[string]interface{}, 0) + cells["A1"] = "Date" + cells["B1"] = "Package" + cells["C1"] = "Metric" + cells["D1"] = "User" + cells["E1"] = "Value" + err = b.workbook.SetCellStyle(sheetName, "A1", "E1", headerStyle) + if err != nil { + return err + } + + b.workbook.SetColWidth(sheetName, "A", "A", 12) + b.workbook.SetColWidth(sheetName, "B", "B", 30) + b.workbook.SetColWidth(sheetName, "C", "C", 30) + b.workbook.SetColWidth(sheetName, "D", "D", 30) + b.workbook.SetColWidth(sheetName, "E", "E", 10) + rowIndex := 2 + for _, businessMetric := 
range businessMetrics { + cells[fmt.Sprintf("A%d", rowIndex)] = businessMetric.Date + cells[fmt.Sprintf("B%d", rowIndex)] = businessMetric.PackageId + cells[fmt.Sprintf("C%d", rowIndex)] = businessMetric.Metric + cells[fmt.Sprintf("D%d", rowIndex)] = businessMetric.Username + cells[fmt.Sprintf("E%d", rowIndex)] = businessMetric.Value + if rowIndex%2 == 0 { + err = b.workbook.SetCellStyle(sheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("E%d", rowIndex), evenCellStyle) + } else { + err = b.workbook.SetCellStyle(sheetName, fmt.Sprintf("A%d", rowIndex), fmt.Sprintf("E%d", rowIndex), oddCellStyle) + } + if err != nil { + return err + } + rowIndex++ + } + err = setCellsValues(b.workbook, sheetName, cells) + if err != nil { + return fmt.Errorf("failed to set cell values: %v", err.Error()) + } + return nil +} diff --git a/qubership-apihub-service/service/GitClientProvider.go b/qubership-apihub-service/service/GitClientProvider.go new file mode 100644 index 0000000..b75c18f --- /dev/null +++ b/qubership-apihub-service/service/GitClientProvider.go @@ -0,0 +1,230 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "net/http" + "sync" + "time" + + "github.com/buraksezer/olric" + "github.com/shaj13/libcache" + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type GitClientProvider interface { + GetUserClient(integration view.GitIntegrationType, userId string) (client.GitClient, error) + GetUserClientWithNewKey(integration view.GitIntegrationType, userId string, token string) (client.GitClient, error) + GetConfiguration(integration view.GitIntegrationType) (*client.GitClientConfiguration, error) + UpdateUserCache(integration view.GitIntegrationType, userId string, expiresAt time.Time) error +} + +func NewGitClientProvider(configs []client.GitClientConfiguration, repo repository.GitIntegrationRepository, + tokenRevocationHandler client.TokenRevocationHandler, tokenExpirationHandler client.TokenExpirationHandler, op cache.OlricProvider) (GitClientProvider, error) { + + cache := libcache.LRU.New(1000) + cache.SetTTL(time.Minute * 60) + cache.RegisterOnExpired(func(key, _ interface{}) { + cache.Delete(key) + }) + + provider := gitClientProviderImpl{ + op: op, + } + provider.repo = repo + provider.gitlabClientUserCache = cache + provider.tokenRevocationHandler = tokenRevocationHandler + provider.tokenExpirationHandler = tokenExpirationHandler + + for _, config := range configs { + switch config.Integration { + case view.GitlabIntegration: + provider.gitlabConfiguration = config + break + default: + return nil, fmt.Errorf("unknown integration type: %s, unable 
to create client provider", config.Integration) + } + } + utils.SafeAsync(func() { + provider.deleteGCRevokedUsersFromCache() + }) + return &provider, nil +} + +// todo probably need to rename this topic +const GCRevokedUsersTopicName = "git-client-revoked-users" + +func (p *gitClientProviderImpl) deleteGCRevokedUsersFromCache() { + var err error + p.olricC = p.op.Get() + topicName := GCRevokedUsersTopicName + p.gcRevokedUsersTopic, err = p.olricC.NewDTopic(topicName, 10000, 1) + if err != nil { + log.Errorf("Failed to create DTopic: %s", err.Error()) + } + p.gcRevokedUsersTopic.AddListener(func(topic olric.DTopicMessage) { + p.userGCMutex.Lock() + defer p.userGCMutex.Unlock() + + userId := fmt.Sprintf("%v", topic.Message) + p.gitlabClientUserCache.Delete(userId) + }) +} + +type gitClientProviderImpl struct { + repo repository.GitIntegrationRepository + gitlabConfiguration client.GitClientConfiguration + gitlabClientUserCache libcache.Cache + tokenRevocationHandler client.TokenRevocationHandler + tokenExpirationHandler client.TokenExpirationHandler + op cache.OlricProvider + olricC *olric.Olric + gcRevokedUsersTopic *olric.DTopic + userGCMutex sync.RWMutex +} + +func (p *gitClientProviderImpl) GetUserClient(integration view.GitIntegrationType, userId string) (client.GitClient, error) { + //todo check if project's integration type is supported on project creation + if integration != view.GitlabIntegration { + return nil, fmt.Errorf("unsupported integration type: %s", integration) + } + + var err error + + entity, err := p.repo.GetUserApiKey(integration, userId) + if err != nil { + return nil, fmt.Errorf("unable to get api key for user %s and integration %s: %v", userId, integration, err.Error()) + } + if entity == nil || entity.AccessToken == "" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ApiKeyNotFound, + Message: exception.ApiKeyNotFoundMsg, + Params: map[string]interface{}{"user": userId, "integration": integration}, + 
} + } + + var cl *client.GitClient + if element, exists := p.gitlabClientUserCache.Peek(userId); exists { + cl = element.(*client.GitClient) + } + if !entity.ExpiresAt.IsZero() { + // we have a time limited token, need to check if it's close to expire or expired + if time.Until(entity.ExpiresAt) < (5 * time.Minute) { + // it's time to refresh the token + var refreshError error + var updatedToken string + var updatedExpiresAt *time.Time + updatedToken, updatedExpiresAt, refreshError = p.tokenExpirationHandler.TokenExpired(entity.UserId, entity.Integration) + if refreshError != nil { + if time.Until(entity.ExpiresAt) < 0 { + // the token is not valid anymore, we can't create the client + return nil, refreshError + } else { + if cl != nil { + // we should treat the err as warning if the token is still valid and we have cache + log.Warnf("GetUserClient: failed to refresh expiring gitlab token (but client cache is still valid): %v", refreshError) + return *cl, nil + } else { + // we have no cache, but the token should still be valid, so we can create the client using existing token and expiresAt + updatedToken = entity.AccessToken + updatedExpiresAt = &entity.ExpiresAt + log.Warnf("GetUserClient: failed to refresh expiring gitlab token for user %s (and no client cached): %v", entity.UserId, refreshError) + } + } + } + newGitClient, err := client.NewGitlabOauthClient(p.gitlabConfiguration.BaseUrl, updatedToken, userId, p.tokenRevocationHandler, p.tokenExpirationHandler) + if err != nil { + return nil, fmt.Errorf("failed to init gitlab client: %v", err) + } + cl = &newGitClient + if updatedExpiresAt == nil || updatedExpiresAt.IsZero() { + p.gitlabClientUserCache.Store(userId, cl) + } else { + p.gitlabClientUserCache.StoreWithTTL(userId, cl, time.Until(*updatedExpiresAt)) + } + return newGitClient, nil + } + } + + if cl == nil { + newGitClient, err := client.NewGitlabOauthClient(p.gitlabConfiguration.BaseUrl, entity.AccessToken, userId, p.tokenRevocationHandler, 
p.tokenExpirationHandler) + if err != nil { + return nil, fmt.Errorf("failed to init gitlab client: %v", err) + } + cl = &newGitClient + if entity.ExpiresAt.IsZero() { + p.gitlabClientUserCache.Store(userId, cl) + } else { + p.gitlabClientUserCache.StoreWithTTL(userId, cl, time.Until(entity.ExpiresAt)) + } + } + return *cl, nil +} + +// GetUserClientWithNewKey In this case integration entity doesn't exist yet +func (p *gitClientProviderImpl) GetUserClientWithNewKey(integration view.GitIntegrationType, userId string, token string) (client.GitClient, error) { + if integration != view.GitlabIntegration { + return nil, fmt.Errorf("unsupported integration type: %s", integration) + } + + cl, err := client.NewGitlabOauthClient(p.gitlabConfiguration.BaseUrl, token, userId, p.tokenRevocationHandler, p.tokenExpirationHandler) + if err != nil { + return nil, err + } + p.gitlabClientUserCache.Store(userId, &cl) + return cl, nil +} + +func (p *gitClientProviderImpl) GetConfiguration(integration view.GitIntegrationType) (*client.GitClientConfiguration, error) { + switch integration { + case view.GitlabIntegration: + return &p.gitlabConfiguration, nil + default: + return nil, fmt.Errorf("unknown integration type: %s, unable to get configuration", integration) + } +} + +func (p *gitClientProviderImpl) UpdateUserCache(integration view.GitIntegrationType, userId string, expiresAt time.Time) error { + var err error + + entity, err := p.repo.GetUserApiKey(integration, userId) + if err != nil { + return fmt.Errorf("unable to get api key for user %s and integration %s: %v", userId, integration, err.Error()) + } + if entity == nil || entity.AccessToken == "" { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ApiKeyNotFound, + Message: exception.ApiKeyNotFoundMsg, + Params: map[string]interface{}{"user": userId, "integration": integration}, + } + } + + cl, err := client.NewGitlabOauthClient(p.gitlabConfiguration.BaseUrl, entity.AccessToken, userId, 
p.tokenRevocationHandler, p.tokenExpirationHandler) + if err != nil { + return err + } + p.gitlabClientUserCache.StoreWithTTL(userId, &cl, time.Until(expiresAt)) + return nil +} diff --git a/qubership-apihub-service/service/GitHookService.go b/qubership-apihub-service/service/GitHookService.go new file mode 100644 index 0000000..06d5297 --- /dev/null +++ b/qubership-apihub-service/service/GitHookService.go @@ -0,0 +1,412 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + goctx "context" + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" + "github.com/xanzy/go-gitlab" +) + +type GitHookService interface { + SetGitLabToken(ctx context.SecurityContext, projectId string, token string) error + HandleGitLabEvent(eventType gitlab.EventType, event interface{}, token string) ([]view.PublishV2Response, error) +} + +func NewGitHookService(projectRepo repository.PrjGrpIntRepository, branchService BranchService, buildService BuildService, userService UserService) GitHookService { + return &gitHookService{ + projectRepo: projectRepo, + branchService: branchService, + buildService: buildService, + userService: userService, + } +} + +type gitHookService struct { + projectRepo repository.PrjGrpIntRepository + branchService BranchService + buildService BuildService + userService UserService +} + +func (s gitHookService) SetGitLabToken(ctx context.SecurityContext, projectId string, token string) error { + existingPrj, err := s.projectRepo.GetById(projectId) + if err != nil { + return err + } + if existingPrj == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: map[string]interface{}{"projectId": projectId}, + } + } + existingPrj.SecretToken = token + existingPrj.SecretTokenUserId = ctx.GetUserId() + _, err = s.projectRepo.Update(existingPrj) + return err +} + +func (s gitHookService) HandleGitLabEvent(eventType gitlab.EventType, event interface{}, token string) 
([]view.PublishV2Response, error) { + switch e := event.(type) { + case *gitlab.PushEvent: + log.Debugf("Parsed push event: %v", *e) + wrappedEvent, err := s.newGitlabPushEventWrapper(e, token) + if err != nil { + return nil, err + } + return s.handleGitLabBranchUpdated(wrappedEvent) + case *gitlab.TagEvent: + log.Debugf("Parsed tag event: %v", *e) + wrappedEvent, err := s.newGitlabTagEventWrapper(e, token) + if err != nil { + return nil, err + } + return s.handleGitLabBranchUpdated(wrappedEvent) + default: + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GitIntegrationUnsupportedHookEventType, + Message: exception.GitIntegrationUnsupportedHookEventTypeMsg, + Params: map[string]interface{}{ + "type": string(eventType), + }, + } + } +} + +func (s gitHookService) handleGitLabBranchUpdated(e gitlabEventWrapper) ([]view.PublishV2Response, error) { + projects, err := s.projectRepo.GetProjectsForIntegration(string(view.GitlabIntegration), strconv.Itoa(e.getGitlabProjectId()), e.getToken()) + if err != nil { + return nil, err + } + + result := []view.PublishV2Response{} + log.Debugf("Found %v projects", len(projects)) + for _, project := range projects { + userId := e.getUserId(project.SecretTokenUserId) + log.Debugf("Creating user context for project %q with userId %q", project.Id, userId) + usrCtx := context.CreateFromId(userId) + + goCtx := context.CreateContextWithSecurity(goctx.Background(), usrCtx) + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("handleGitLabBranchUpdated(%s)", e.String())) + + log.Debugf("Getting branch details from git for project %+v, branch %s", project.Id, e.getBranch()) + branchDetails, err := s.branchService.GetBranchDetailsFromGitCommit(goCtx, project.Id, e.getBranch(), e.getCommitId()) + if err != nil { + return nil, err + } + if branchDetails == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ConfigNotFound, + Message: 
exception.ConfigNotFoundMsg, + Params: map[string]interface{}{"projectId": project.Id, "branch": e.getBranch()}, + } + } + log.Debugf("Branch details %+v", *branchDetails) + + if !e.shouldPublish(branchDetails) { + log.Debugf("Publish is not required") + continue + } + + log.Debugf("Getting version publish details from git for project %+v, branch %s", project.Id, e.getBranch()) + publishDetails, err := s.branchService.GetVersionPublishDetailsFromGitCommit(goCtx, project.Id, e.getBranch(), e.getCommitId()) + if err != nil { + return nil, err + } + if publishDetails != nil { + log.Debugf("Version publish details: %+v", *publishDetails) + } + + buildConfig, err := e.getBuildConfig(&project, branchDetails, publishDetails, e.getBranch(), userId) + if err != nil { + return nil, err + } + log.Debugf("Build Config created from branch details %+v", buildConfig) + src, err := s.branchService.GetAllZippedContentFromGitCommit(goCtx, branchDetails, project.Id, e.getBranch(), e.getCommitId()) + if err != nil { + return nil, err + } + resp, err := s.buildService.PublishVersion(usrCtx, *buildConfig, src, false, "", nil, false, false) + if err != nil { + return nil, err + } + log.Debugf("Publish response: %+v", *resp) + result = append(result, *resp) + } + return result, nil +} + +func (s gitHookService) getUserId(eventUser, eventEmail string) (string, error) { + users, err := s.userService.GetUsersByIds([]string{eventUser}) + if err != nil { + return "", err + } + if len(users) > 0 { + return users[0].Id, nil + } + user, err := s.userService.GetUserByEmail(eventEmail) + if err != nil { + return "", err + } + if user != nil { + return user.Id, nil + } + return "", nil +} + +func ContentsToBCFiles(branchContent []view.Content) []view.BCFile { + result := []view.BCFile{} + for i := range branchContent { + result = append(result, view.BCFile{ + FileId: branchContent[i].FileId, + Publish: &branchContent[i].Publish, + Labels: branchContent[i].Labels, + BlobId: branchContent[i].BlobId, + 
}) + } + return result +} + +func RefsToBCRefs(refs []view.Ref) []view.BCRef { + result := []view.BCRef{} + for _, r := range refs { + result = append(result, view.BCRef{ + RefId: r.RefPackageId, + Version: r.RefPackageVersion, + }) + } + return result +} + +type gitlabEventWrapper interface { + getToken() string + getGitlabProjectId() int + getBranch() string + getCommitId() string + getUserId(defaultUrerId string) string + shouldPublish(branchDetails *view.Branch) bool + getBuildConfig(project *entity.ProjectIntEntity, branchDetails *view.Branch, + versionPublishDetails *view.GitVersionPublish, branch, userId string) (*view.BuildConfig, error) + String() string +} + +type gitlabTagEventWrapper struct { + event *gitlab.TagEvent + token string + branch string + commitId string + userId string +} + +func (s *gitHookService) newGitlabTagEventWrapper(event *gitlab.TagEvent, token string) (gitlabEventWrapper, error) { + userId, err := s.getUserId(event.UserUsername, event.UserEmail) + if err != nil { + return nil, err + } + return &gitlabTagEventWrapper{ + event: event, + token: token, + branch: strings.TrimPrefix(event.Ref, "refs/tags/"), + commitId: event.After, + userId: userId, + }, nil +} + +func (w gitlabTagEventWrapper) getToken() string { + return w.token +} + +func (w gitlabTagEventWrapper) getGitlabProjectId() int { + return w.event.ProjectID +} + +func (w gitlabTagEventWrapper) getBranch() string { + return w.branch +} + +func (w gitlabTagEventWrapper) getCommitId() string { + return w.commitId +} + +func (w gitlabTagEventWrapper) getUserId(defaultUrerId string) string { + if w.userId == "" { + return defaultUrerId + } + return w.userId +} + +func (w gitlabTagEventWrapper) shouldPublish(branchDetails *view.Branch) bool { + return true +} + +func (w gitlabTagEventWrapper) getBuildConfig(project *entity.ProjectIntEntity, branchDetails *view.Branch, + versionPublishDetails *view.GitVersionPublish, branch, userId string) (*view.BuildConfig, error) { + return 
&view.BuildConfig{ + PackageId: project.PackageId, + Version: branch, + BuildType: view.BuildType, + Status: string(view.Draft), + CreatedBy: userId, + Refs: RefsToBCRefs(branchDetails.Refs), + Files: ContentsToBCFiles(branchDetails.Files), + Metadata: view.BuildConfigMetadata{ + BranchName: branch, + RepositoryUrl: project.RepositoryUrl, + }, + }, nil +} + +func (w gitlabTagEventWrapper) String() string { + return fmt.Sprintf("gitlabEventWrapper{Type: %s, Branch: %s, Commit: %s, UserId: %s}", gitlab.EventTypeTagPush, w.branch, w.commitId, w.userId) +} + +type gitlabPushEventWrapper struct { + event *gitlab.PushEvent + token string + branch string + commitId string + userId string + changedFiles map[string]struct{} +} + +func (s *gitHookService) newGitlabPushEventWrapper(event *gitlab.PushEvent, token string) (gitlabEventWrapper, error) { + userId, err := s.getUserId(event.UserUsername, event.UserEmail) + if err != nil { + return nil, err + } + result := &gitlabPushEventWrapper{ + event: event, + token: token, + branch: strings.TrimPrefix(event.Ref, "refs/heads/"), + commitId: event.After, + userId: userId, + } + result.calculateChangedFiles() + return result, nil +} + +func (w gitlabPushEventWrapper) getToken() string { + return w.token +} + +func (w gitlabPushEventWrapper) getGitlabProjectId() int { + return w.event.ProjectID +} + +func (w gitlabPushEventWrapper) getBranch() string { + return w.branch +} + +func (w gitlabPushEventWrapper) getCommitId() string { + return w.commitId +} + +func (w gitlabPushEventWrapper) getUserId(defaultUrerId string) string { + if w.userId == "" { + return defaultUrerId + } + return w.userId +} + +func (w gitlabPushEventWrapper) shouldPublish(branchDetails *view.Branch) bool { + if _, ok := w.changedFiles[getApihubConfigFileId(branchDetails.ProjectId)]; ok { + return true + } + if _, ok := w.changedFiles[getApihubVersionPublishFileId(branchDetails.ProjectId)]; ok { + return true + } + for _, file := range branchDetails.Files { + 
if _, ok := w.changedFiles[file.FileId]; ok { + return true + } + } + return false +} + +func (w *gitlabPushEventWrapper) calculateChangedFiles() { + var paths []string + w.changedFiles = map[string]struct{}{} + if w.event.TotalCommitsCount == 0 { + return + } + for _, commit := range w.event.Commits { + paths = append(paths, commit.Added...) + paths = append(paths, commit.Modified...) + paths = append(paths, commit.Removed...) + } + for _, path := range paths { + if _, ok := w.changedFiles[path]; !ok { + w.changedFiles[path] = struct{}{} + } + } +} + +func (w gitlabPushEventWrapper) getBuildConfig(project *entity.ProjectIntEntity, branchDetails *view.Branch, + versionPublishDetails *view.GitVersionPublish, branch, userId string) (*view.BuildConfig, error) { + + packageId := project.PackageId + version := branch + previousVersion := "" + previousVersionPackageId := "" + status := string(view.Draft) + if versionPublishDetails != nil { + if versionPublishDetails.PackageId != packageId { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GitVersionPublishFileInvalid, + Message: exception.GitVersionPublishFileInvalidMsg, + Params: map[string]interface{}{"projectId": project.Id, "branch": branch}, + Debug: "packageId doesn't match with packageId on project", + } + } + version = versionPublishDetails.Version + previousVersion = versionPublishDetails.PreviousVersion + previousVersionPackageId = versionPublishDetails.PreviousVersionPackageId + status = versionPublishDetails.Status + } + return &view.BuildConfig{ + PackageId: packageId, + Version: version, + PreviousVersion: previousVersion, + PreviousVersionPackageId: previousVersionPackageId, + BuildType: view.BuildType, + Status: status, + CreatedBy: userId, + Refs: RefsToBCRefs(branchDetails.Refs), + Files: ContentsToBCFiles(branchDetails.Files), + Metadata: view.BuildConfigMetadata{ + BranchName: branch, + RepositoryUrl: project.RepositoryUrl, + }, + }, nil +} + +func (w 
gitlabPushEventWrapper) String() string { + return fmt.Sprintf("gitlabEventWrapper{Type: %s, Branch: %s, Commit: %s, UserId: %s}", gitlab.EventTypePush, w.branch, w.commitId, w.userId) +} diff --git a/qubership-apihub-service/service/GitRepoFilesService.go b/qubership-apihub-service/service/GitRepoFilesService.go new file mode 100644 index 0000000..6509604 --- /dev/null +++ b/qubership-apihub-service/service/GitRepoFilesService.go @@ -0,0 +1,115 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + goctx "context" + "fmt" + "net/http" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type GitRepoFilesService interface { + ListFiles(ctx context.SecurityContext, projectId string, branchName string, path string, pagingParams view.PagingParams, onlyAddable bool) ([]view.FileNode, error) +} + +func NewProjectFilesService(gitClientProvider GitClientProvider, repo repository.PrjGrpIntRepository, branchService BranchService) GitRepoFilesService { + configFolder := ApiHubBaseConfigPath + configFolder = strings.TrimPrefix(configFolder, "/") + configFolder = strings.TrimSuffix(configFolder, "/") + return &projectFilesServiceImpl{gitClientProvider: gitClientProvider, repo: repo, branchService: branchService, configFolder: configFolder} +} + +type projectFilesServiceImpl struct { + gitClientProvider GitClientProvider + repo repository.PrjGrpIntRepository + branchService BranchService + configFolder string +} + +func (p projectFilesServiceImpl) ListFiles(ctx context.SecurityContext, projectId string, branchName string, path string, pagingParams view.PagingParams, onlyAddable bool) ([]view.FileNode, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ListFiles(%s,%s,%s,%+v,%t)", projectId, branchName, path, pagingParams, onlyAddable)) + + project, err := p.repo.GetById(projectId) + if err != nil { + return nil, err + } + if project == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: 
map[string]interface{}{"projectId": projectId}, + } + } + + it, err := view.GitIntegrationTypeFromStr(project.IntegrationType) + if err != nil { + return nil, err + } + + gitClient, err := p.gitClientProvider.GetUserClient(it, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + existingFiles := map[string]bool{} + existingFolders := []string{} + if onlyAddable { + branch, err := p.branchService.GetBranchDetailsEP(goCtx, projectId, branchName, false) + if err != nil { + return nil, err + } + processedPath := strings.TrimPrefix(path, "/") + processedPath = strings.TrimSuffix(processedPath, "/") + + if branch != nil && len(branch.Files) != 0 { + for _, bFile := range branch.Files { + if bFile.IsFolder && strings.HasPrefix(bFile.FileId, processedPath) { + existingFolders = append(existingFolders, bFile.FileId) + continue + } + if bFile.Status != view.StatusExcluded && bFile.Status != view.StatusDeleted && strings.HasPrefix(bFile.FileId, processedPath) { + existingFiles[bFile.FileId] = true + } + } + } + } + + files, err := gitClient.ListDirectory(goCtx, project.RepositoryId, branchName, path, pagingParams, existingFiles, existingFolders) + if err != nil { + return nil, err + } + + configFolderIndex := -1 + for index, file := range files { + if file.Name == p.configFolder { + configFolderIndex = index + break + } + } + if configFolderIndex != -1 { + files = append(files[:configFolderIndex], files[configFolderIndex+1:]...) + } + + return files, nil +} diff --git a/qubership-apihub-service/service/GroupService.go b/qubership-apihub-service/service/GroupService.go new file mode 100644 index 0000000..5dad182 --- /dev/null +++ b/qubership-apihub-service/service/GroupService.go @@ -0,0 +1,259 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package service

import (
	"net/http"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// GroupService manages package groups: creation, lookup, hierarchy navigation
// and per-user favorites.
type GroupService interface {
	// AddGroup creates a new group; its id is derived from parentId and alias.
	AddGroup(ctx context.SecurityContext, group *view.Group) (*view.Group, error)
	// GetAllGroups lists root groups (depth 0) or children of id (depth 1),
	// optionally filtered by name and by the caller's favorites.
	GetAllGroups(ctx context.SecurityContext, depth int, id string, name string, onlyFavorite bool) (*view.Groups, error)
	GetGroup(id string) (*view.Group, error)
	// GetGroupInfo returns the group together with its parent chain,
	// favorite flag and last published version.
	GetGroupInfo(ctx context.SecurityContext, id string) (*view.GroupInfo, error)
	FavorGroup(ctx context.SecurityContext, id string) error
	DisfavorGroup(ctx context.SecurityContext, id string) error
}

// NewGroupService wires the repositories used by the group service.
func NewGroupService(repo repository.PrjGrpIntRepository, projectService ProjectService, favoritesRepo repository.FavoritesRepository, publishedRepo repository.PublishedRepository, userRepo repository.UserRepository) GroupService {
	return &groupServiceImpl{
		repo:           repo,
		projectService: projectService,
		favoritesRepo:  favoritesRepo,
		publishedRepo:  publishedRepo,
		userRepo:       userRepo,
	}
}

type groupServiceImpl struct {
	repo           repository.PrjGrpIntRepository
	projectService ProjectService
	favoritesRepo  repository.FavoritesRepository
	publishedRepo  repository.PublishedRepository
	userRepo       repository.UserRepository
}

// AddGroup validates the parent (when given), derives the group id as
// "<parentId>.<alias>" (or just alias for a root group), rejects ids already
// taken by existing/deleted packages or reserved private package ids, then
// persists the group stamped with the caller and current time.
func (g groupServiceImpl) AddGroup(ctx context.SecurityContext, group *view.Group) (*view.Group, error) {
	if group.ParentId != "" {
		existingEnt, err := g.publishedRepo.GetPackageGroup(group.ParentId)
		if err != nil {
			return nil, err
		}
		if existingEnt == nil {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.ParentGroupNotFound,
				Message: exception.ParentGroupNotFoundMsg,
				Params:  map[string]interface{}{"parentId": group.ParentId},
			}
		}
		group.Id = group.ParentId + "." + group.Alias
	} else {
		group.Id = group.Alias
	}
	// Deleted packages also reserve their id, hence "IncludingDeleted".
	exGrp, err := g.publishedRepo.GetPackageIncludingDeleted(group.Id)
	if err != nil {
		return nil, err
	}

	if exGrp != nil {
		return nil, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.AliasAlreadyTaken,
			Message: exception.AliasAlreadyTakenMsg,
			Params:  map[string]interface{}{"alias": group.Alias},
		}
	}
	packageIdReserved, err := g.userRepo.PrivatePackageIdExists(group.Id)
	if err != nil {
		return nil, err
	}
	if packageIdReserved {
		return nil, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.AliasAlreadyTaken,
			Message: exception.AliasAlreadyTakenMsg,
			Params:  map[string]interface{}{"alias": group.Alias},
		}
	}

	group.CreatedAt = time.Now()
	group.CreatedBy = ctx.GetUserId()
	err = g.publishedRepo.CreatePackage(entity.MakePackageGroupEntity(group))
	if err != nil {
		return nil, err
	}
	return group, err
}

// GetAllGroups returns groups for the requested depth, enriched with each
// group's last published version. Only depth 0 (all roots) and depth 1
// (children of id) are supported.
func (g groupServiceImpl) GetAllGroups(ctx context.SecurityContext, depth int, id string, name string, onlyFavorite bool) (*view.Groups, error) {
	var entities []entity.PackageFavEntity
	var err error
	if depth == 0 {
		entities, err = g.publishedRepo.GetAllPackageGroups(name, onlyFavorite, ctx.GetUserId())
		if err != nil {
			return nil, err
		}
	} else if depth == 1 {
		entities, err = g.publishedRepo.GetChildPackageGroups(id, name, onlyFavorite, ctx.GetUserId())
		if err != nil {
			return nil, err
		}
	} else {
		return nil, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.IncorrectDepthForRootGroups,
			Message: exception.IncorrectDepthForRootGroupsMsg,
			Params:  map[string]interface{}{"depth": depth},
		}
	}
	var ids []string
	for _, ent := range entities {
		ids = append(ids, ent.Id)
	}
	// Fetch last versions in one batch and attach them to the matching entities.
	lastVersions, err := g.publishedRepo.GetLastVersions(ids)
	if err != nil {
		return nil, err
	}
	if len(lastVersions) > 0 {
		versionsMap := map[string]string{}
		for _, version := range lastVersions {
			versionsMap[version.PackageId] = version.Version
		}
		// Assign via index so the slice element (not a loop copy) is updated.
		for i, ent := range entities {
			val, exists := versionsMap[ent.Id]
			if exists {
				entities[i].LastVersion = val
			}
		}
	}

	result := view.Groups{Groups: []view.Group{}}
	for _, ent := range entities {
		//do not add starting group
		if ent.Id == id {
			continue
		}
		result.Groups = append(result.Groups, *entity.MakePackageGroupFavView(&ent))
	}

	return &result, nil
}

// GetGroup returns the group view for id, or a 404 CustomError when absent.
func (g groupServiceImpl) GetGroup(id string) (*view.Group, error) {
	ent, err := g.publishedRepo.GetPackageGroup(id)
	if err != nil {
		return nil, err
	}
	if ent == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.GroupNotFound,
			Message: exception.GroupNotFoundMsg,
			Params:  map[string]interface{}{"id": id},
		}
	}
	return entity.MakePackageGroupView(ent), err
}

// GetGroupInfo returns the group plus parent chain, the caller's favorite flag
// and the last published version (when any).
func (g groupServiceImpl) GetGroupInfo(ctx context.SecurityContext, id string) (*view.GroupInfo, error) {
	ent, err := g.publishedRepo.GetPackageGroup(id)
	if err != nil {
		return nil, err
	}
	if ent == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.GroupNotFound,
			Message: exception.GroupNotFoundMsg,
			Params:  map[string]interface{}{"id": id},
		}
	}
	parents, err := g.publishedRepo.GetParentPackageGroups(id)
	if err != nil {
		return nil, err
	}
	var parentGroups []view.Group
	for _, grp := range parents {
		// The repo may include the group itself; keep only true parents.
		if grp.Id != id {
			parentGroups = append(parentGroups, *entity.MakePackageGroupView(&grp))
		}
	}
	isFavorite, err := g.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), id)
	if err != nil {
		return nil, err
	}
	lastVersion, err := g.publishedRepo.GetLastVersion(id)
	if err != nil {
		return nil, err
	}
	if lastVersion != nil {
		ent.LastVersion = lastVersion.Version
	}

	return entity.MakePackageGroupInfoView(ent, parentGroups, isFavorite), nil
}

// FavorGroup adds the group to the caller's favorites; favoring twice is an error.
func (g groupServiceImpl) FavorGroup(ctx context.SecurityContext, id string) error {
	userId := ctx.GetUserId()

	favorite, err := g.favoritesRepo.IsFavoritePackage(userId, id)
	if err != nil {
		return err
	}

	if favorite {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.AlreadyFavored,
			Message: exception.AlreadyFavoredMsg,
			Params:  map[string]interface{}{"id": id, "user": userId},
		}
	}
	err = g.favoritesRepo.AddPackageToFavorites(userId, id)
	if err != nil {
		return err
	}
	return nil
}

// DisfavorGroup removes the group from the caller's favorites; removing a
// group that is not favored is an error.
func (g groupServiceImpl) DisfavorGroup(ctx context.SecurityContext, id string) error {
	userId := ctx.GetUserId()
	favorite, err := g.favoritesRepo.IsFavoritePackage(userId, id)
	if err != nil {
		return err
	}
	if !favorite {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.NotFavored,
			Message: exception.NotFavoredMsg,
			Params:  map[string]interface{}{"id": id, "user": userId},
		}
	}
	err = g.favoritesRepo.RemovePackageFromFavorites(userId, id)
	if err != nil {
		return err
	}
	return nil
}
diff --git a/qubership-apihub-service/service/IntegrationsService.go b/qubership-apihub-service/service/IntegrationsService.go new file mode 100644 index 0000000..ef4de95 --- /dev/null +++ b/qubership-apihub-service/service/IntegrationsService.go @@ -0,0 +1,150 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
//
you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + goctx "context" + "fmt" + "net/http" + "sort" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type IntegrationsService interface { + GetUserApiKeyStatus(integration view.GitIntegrationType, userId string) (view.ApiKeyStatus, error) + SetUserApiKey(integration view.GitIntegrationType, userId string, apiKey string) error + DeleteUserApiKey(integration view.GitIntegrationType, userId string) error + SetOauthGitlabTokenForUser(integration view.GitIntegrationType, userId string, oauthToken string, refreshToken string, expiresAt time.Time, redirectUri string) error + ListRepositories(ctx context.SecurityContext, integration view.GitIntegrationType, search string) ([]view.GitRepository, []view.GitGroup, error) + ListBranchesAndTags(ctx context.SecurityContext, integration view.GitIntegrationType, repoId string, filter string) (*view.GitBranches, error) +} + +const ApiKeyStatusPresent = "API_KEY_PRESENT" +const ApiKeyStatusAbsent = "API_KEY_ABSENT" +const ApiKeyStatusRevoked = "API_KEY_REVOKED" + 
+func NewIntegrationsService(repo repository.GitIntegrationRepository, gitClientProvider GitClientProvider) IntegrationsService { + return &integrationsServiceImpl{repo: repo, gitClientProvider: gitClientProvider} +} + +type integrationsServiceImpl struct { + repo repository.GitIntegrationRepository + gitClientProvider GitClientProvider +} + +func (s integrationsServiceImpl) ListRepositories(ctx context.SecurityContext, integration view.GitIntegrationType, search string) ([]view.GitRepository, []view.GitGroup, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ListRepositories(%+v,%s)", integration, search)) + + client, err := s.gitClientProvider.GetUserClient(integration, ctx.GetUserId()) + if err != nil { + return nil, nil, err + } + return client.SearchRepositories(goCtx, search, 15) +} + +func (s integrationsServiceImpl) ListBranchesAndTags(ctx context.SecurityContext, integration view.GitIntegrationType, repoId string, filter string) (*view.GitBranches, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("ListBranchesAndTags(%+v,%s,%s)", integration, repoId, filter)) + + gitClient, err := s.gitClientProvider.GetUserClient(integration, ctx.GetUserId()) + if err != nil { + return nil, err + } + branchNames, _, err := gitClient.GetRepoBranches(goCtx, repoId, filter, -1) + if err != nil { + return nil, err + } + branches := make([]view.GitBranch, 0) + for _, name := range branchNames { + branches = append(branches, view.GitBranch{Name: name}) + } + tags, err := gitClient.GetRepoTags(goCtx, repoId, filter, -1) + for _, name := range tags { + branches = append(branches, view.GitBranch{Name: name}) + } + sort.Slice(branches, func(i, j int) bool { + return branches[i].Name < branches[j].Name + }) + 
return &view.GitBranches{Branches: branches}, nil +} + +func (s integrationsServiceImpl) SetOauthGitlabTokenForUser(integration view.GitIntegrationType, userId string, oauthToken string, refreshToken string, expiresAt time.Time, redirectUri string) error { + conf, err := s.gitClientProvider.GetConfiguration(integration) + if err != nil { + return err + } + + // probe test for new oauth token + _, err = client.NewGitlabOauthClient(conf.BaseUrl, oauthToken, userId, &client.TokenRevocationHandlerStub{}, &client.TokenExpirationHandlerStub{}) + if err != nil { + return fmt.Errorf("retrieved project access token is not functional. Error - %s", err.Error()) + } + + _, err = s.repo.SaveUserApiKey(entity.ApiKeyEntity{ + Integration: integration, + UserId: userId, + AccessToken: oauthToken, + RefreshToken: refreshToken, + ExpiresAt: expiresAt, + RedirectUri: redirectUri, + FailedRefreshAttempts: 0, + }) + if err != nil { + return err + } + return s.gitClientProvider.UpdateUserCache(integration, userId, expiresAt) +} + +func (s integrationsServiceImpl) GetUserApiKeyStatus(integration view.GitIntegrationType, userId string) (view.ApiKeyStatus, error) { + entity, err := s.repo.GetUserApiKey(integration, userId) + if err != nil { + return view.ApiKeyStatus{}, err + } + if entity != nil && entity.AccessToken != "" { + if entity.IsRevoked { + return view.ApiKeyStatus{Status: ApiKeyStatusRevoked}, nil + } + return view.ApiKeyStatus{Status: ApiKeyStatusPresent}, nil + } else { + return view.ApiKeyStatus{Status: ApiKeyStatusAbsent}, nil + } +} + +func (s integrationsServiceImpl) SetUserApiKey(integration view.GitIntegrationType, userId string, apiKey string) error { + _, err := s.gitClientProvider.GetUserClientWithNewKey(integration, userId, apiKey) + if err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GitIntegrationConnectFailed, + Message: exception.GitIntegrationConnectFailedMsg, + Params: map[string]interface{}{"type": integration, 
"user": userId}, + Debug: err.Error(), + } + } + _, err = s.repo.SaveUserApiKey(entity.ApiKeyEntity{Integration: integration, UserId: userId, AccessToken: apiKey}) + return err +} + +func (s integrationsServiceImpl) DeleteUserApiKey(integration view.GitIntegrationType, userId string) error { + return s.repo.DeleteUserApiKey(integration, userId) +} diff --git a/qubership-apihub-service/service/InternalWebsocketService.go b/qubership-apihub-service/service/InternalWebsocketService.go new file mode 100644 index 0000000..4c9c521 --- /dev/null +++ b/qubership-apihub-service/service/InternalWebsocketService.go @@ -0,0 +1,234 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "encoding/json" + "fmt" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/buraksezer/olric" + "github.com/gorilla/websocket" + "github.com/shaj13/libcache" + log "github.com/sirupsen/logrus" +) + +type InternalWebsocketService interface { + GetBranchSessionLogs(projectId string, branchName string) []interface{} + GetFileSessionLogs(projectId string, branchName string, fileId string) []interface{} + LogIncomingBranchMessages(host string, projectId string, branchName string, token string) + LogIncomingFileMessages(host string, projectId string, branchName string, fileId string, token string) + SendMessageToFileWebsocket(host string, projectId string, branchName string, fileId string, token string, message interface{}) +} + +func NewInternalWebsocketService(wsLoadBalancer WsLoadBalancer, op cache.OlricProvider) InternalWebsocketService { + logsCache := libcache.FIFO.New(0) + logsCache.SetTTL(time.Hour) + is := &internalWebsocketServiceImpl{ + logsCache: logsCache, + wsLoadBalancer: wsLoadBalancer, + op: op, + isReadyWg: sync.WaitGroup{}, + } + is.isReadyWg.Add(1) + utils.SafeAsync(func() { + is.initDTopic() + }) + return is +} + +type internalWebsocketServiceImpl struct { + logsCache libcache.Cache + wsLoadBalancer WsLoadBalancer + op cache.OlricProvider + isReadyWg sync.WaitGroup + olricC *olric.Olric + wsLogMessages map[string][]interface{} + wsLogMutex sync.RWMutex + wsLogTopic *olric.DTopic +} + +type wsMessage struct { + SessionId string + Data interface{} +} + +func (l *internalWebsocketServiceImpl) initDTopic() { + var err error + l.olricC = l.op.Get() + topicName := "ws-log-messages" + l.wsLogTopic, err = l.olricC.NewDTopic(topicName, 10000, 1) + if err != nil { + log.Errorf("Failed to create DTopic: %s", err.Error()) + } + l.wsLogMessages = make(map[string][]interface{}) + 
l.wsLogTopic.AddListener(func(topic olric.DTopicMessage) { + // lock the mutex to prevent concurrent access to the logs map + l.wsLogMutex.Lock() + defer l.wsLogMutex.Unlock() + + var wsMsg wsMessage + if topic.Message != nil { + err := json.Unmarshal(topic.Message.([]byte), &wsMsg) + if err != nil { + log.Errorf("Error while deserializing the ws log message : %s", err) + return + } + } + messages, ok := l.wsLogMessages[wsMsg.SessionId] + if !ok { + messages = make([]interface{}, 0) + } + messages = append(messages, wsMsg.Data) + l.wsLogMessages[wsMsg.SessionId] = messages + }) + + l.isReadyWg.Done() +} + +func (l *internalWebsocketServiceImpl) GetBranchSessionLogs(projectId string, branchName string) []interface{} { + return l.getSessionLogs(makeBranchEditSessionId(projectId, branchName)) +} + +func (l *internalWebsocketServiceImpl) GetFileSessionLogs(projectId string, branchName string, fileId string) []interface{} { + return l.getSessionLogs(makeFileEditSessionId(projectId, branchName, fileId)) +} + +func (l *internalWebsocketServiceImpl) LogIncomingBranchMessages(host string, projectId string, branchName string, token string) { + + srv, err := l.wsLoadBalancer.SelectWsServer(projectId, branchName, "") + if err != nil { + log.Errorf("Failed to select ws server: %s", err.Error()) + return + } + var wsUrl string + if srv != LocalServer { + wsUrl = "ws://" + srv + fmt.Sprintf(":8080/ws/v1/projects/%s/branches/%s?token=%s", projectId, branchName, token) + } else { + wsUrl = "ws://" + fmt.Sprintf("localhost:8080/ws/v1/projects/%s/branches/%s?token=%s", projectId, branchName, token) + } + + sessionId := makeBranchEditSessionId(projectId, branchName) + utils.SafeAsync(func() { + l.logIncommingMessages(wsUrl, sessionId) + }) +} + +func (l *internalWebsocketServiceImpl) LogIncomingFileMessages(host string, projectId string, branchName string, fileId string, token string) { + srv, err := l.wsLoadBalancer.SelectWsServer(projectId, branchName, "") + if err != nil { + 
log.Errorf("Failed to select ws server: %s", err.Error()) + return + } + var wsUrl string + if srv != LocalServer { + wsUrl = "ws://" + srv + fmt.Sprintf(":8080/ws/v1/projects/%s/branches/%s/files/%s?token=%s", projectId, branchName, fileId, token) + } else { + wsUrl = "ws://" + fmt.Sprintf("localhost:8080/ws/v1/projects/%s/branches/%s/files/%s?token=%s", projectId, branchName, fileId, token) + } + + sessionId := makeFileEditSessionId(projectId, branchName, fileId) + utils.SafeAsync(func() { + l.logIncommingMessages(wsUrl, sessionId) + }) +} + +func (l *internalWebsocketServiceImpl) SendMessageToFileWebsocket(host string, projectId string, branchName string, fileId string, token string, message interface{}) { + srv, err := l.wsLoadBalancer.SelectWsServer(projectId, branchName, "") + if err != nil { + log.Errorf("Failed to select ws server: %s", err.Error()) + return + } + var wsUrl string + if srv != LocalServer { + wsUrl = "ws://" + srv + fmt.Sprintf(":8080/ws/v1/projects/%s/branches/%s/files/%s?token=%s", projectId, branchName, fileId, token) + } else { + wsUrl = "ws://" + host + fmt.Sprintf("/ws/v1/projects/%s/branches/%s/files/%s?token=%s", projectId, branchName, fileId, token) + } + + utils.SafeAsync(func() { + l.sendMessageToWebsocket(wsUrl, message) + }) +} + +func (l *internalWebsocketServiceImpl) storeSessionLogs(sessionId string, message interface{}) { + var data wsMessage + if message != nil { + data = wsMessage{SessionId: sessionId, + Data: message, + } + serializedMessage, err := json.Marshal(data) + if err != nil { + log.Errorf("Error while serializing the ws log message : %s", err) + return + } + err = l.wsLogTopic.Publish(serializedMessage) + if err != nil { + log.Errorf("Error while publishing the ws log message : %s", err) + } + } +} + +func (l *internalWebsocketServiceImpl) getSessionLogs(sessionId string) []interface{} { + messages, ok := l.wsLogMessages[sessionId] + if !ok { + log.Infof("ws log message key not found for sessionId: %s", 
sessionId) + return make([]interface{}, 0) + } + return messages +} + +func (l *internalWebsocketServiceImpl) logIncommingMessages(wsUrl string, sessionId string) { + ws, _, err := websocket.DefaultDialer.Dial(wsUrl, nil) + if err != nil { + log.Errorf("Failed to connect to internal ws : %v , %s ", err.Error(), wsUrl) + return + } + defer ws.Close() + + ws.SetReadDeadline(time.Now().Add(time.Hour)) + for { + _, message, err := ws.ReadMessage() + if err != nil { + log.Debugf("Stop reading from internal ws: %v", err.Error()) + break + } + var jsonMessage interface{} + err = json.Unmarshal(message, &jsonMessage) + if err != nil { + log.Debugf("Failed to decode message from internal ws: %v", err.Error()) + continue + } + l.storeSessionLogs(sessionId, jsonMessage) + } +} + +func (l *internalWebsocketServiceImpl) sendMessageToWebsocket(wsUrl string, message interface{}) { + ws, _, err := websocket.DefaultDialer.Dial(wsUrl, nil) + if err != nil { + log.Errorf("Failed to connect to internal ws: %v , %s ", err.Error(), wsUrl) + return + } + defer ws.Close() + + if err := ws.WriteJSON(message); err != nil { + log.Debugf("Failed to send message to internal ws: %v", err.Error()) + return + } + time.Sleep(time.Minute * 5) //wait for server to respond +} diff --git a/qubership-apihub-service/service/LogsService.go b/qubership-apihub-service/service/LogsService.go new file mode 100644 index 0000000..e9832e5 --- /dev/null +++ b/qubership-apihub-service/service/LogsService.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "strings" + + log "github.com/sirupsen/logrus" +) + +type LogsService interface { + StoreLogs(obj map[string]interface{}) +} + +func NewLogsService() LogsService { + return &logsServiceImpl{} +} + +type logsServiceImpl struct { +} + +func (l logsServiceImpl) StoreLogs(obj map[string]interface{}) { + fields := make([]string, 0) + for key, value := range obj { + fields = append(fields, fmt.Sprintf("%v: %v", key, value)) + } + log.Error(strings.Join(fields, ", ")) //todo maybe log.Info? +} diff --git a/qubership-apihub-service/service/MetricsService.go b/qubership-apihub-service/service/MetricsService.go new file mode 100644 index 0000000..d6a4471 --- /dev/null +++ b/qubership-apihub-service/service/MetricsService.go @@ -0,0 +1,78 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package service

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository"
	"github.com/robfig/cron/v3"
	log "github.com/sirupsen/logrus"
)

// MetricsService schedules periodic metrics collection via cron.
type MetricsService interface {
	// CreateJob registers a metrics-collection job for the given cron schedule.
	CreateJob(schedule string) error
}

// NewMetricsService builds the service with an idle cron instance; the cron is
// (re)created and started on the first CreateJob call.
func NewMetricsService(metricsRepository repository.MetricsRepository) MetricsService {
	return &metricsServiceImpl{
		metricsRepository: metricsRepository,
		cron:              cron.New(),
	}
}

type metricsServiceImpl struct {
	metricsRepository repository.MetricsRepository
	// NOTE(review): connectionProvider is never set by NewMetricsService and is
	// unused in this file — confirm whether it can be removed.
	connectionProvider db.ConnectionProvider
	cron               *cron.Cron
}

// CreateJob registers a MetricsGetterJob on the internal cron. On the first
// call (no entries yet) the cron is recreated with an explicit location and
// started; subsequent calls reuse the running cron.
func (c *metricsServiceImpl) CreateJob(schedule string) error {
	job := MetricsGetterJob{
		schedule:          schedule,
		metricsRepository: c.metricsRepository,
	}

	if len(c.cron.Entries()) == 0 {
		// LoadLocation("") yields UTC, so jobs run on UTC time.
		location, err := time.LoadLocation("")
		if err != nil {
			return err
		}
		c.cron = cron.New(cron.WithLocation(location))
		c.cron.Start()
	}

	_, err := c.cron.AddJob(schedule, &job)
	if err != nil {
		log.Warnf("[Metrics service] Job wasn't added for schedule - %s. With error - %s", schedule, err)
		return err
	}
	log.Infof("[Metrics service] Job was created with schedule - %s", schedule)

	return nil
}

// MetricsGetterJob is the cron payload that triggers metrics collection.
type MetricsGetterJob struct {
	schedule          string
	metricsRepository repository.MetricsRepository
}

// Run executes one metrics-collection pass; errors are logged, not propagated
// (cron jobs have no error channel).
func (j MetricsGetterJob) Run() {
	err := j.metricsRepository.StartGetMetricsProcess()
	if err != nil {
		log.Errorf("[MetricsGetterJob-Run] err - %s", err.Error())
	}
}
diff --git a/qubership-apihub-service/service/MinioStorageService.go b/qubership-apihub-service/service/MinioStorageService.go new file mode 100644 index 0000000..fe1b91c --- /dev/null +++ b/qubership-apihub-service/service/MinioStorageService.go @@ -0,0 +1,400 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package service

import (
	"bytes"
	"context"
	"crypto/x509"
	"encoding/base64"
	"errors"
	"fmt"
	"io"
	"os"
	"strings"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils"
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
	"github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"
	log "github.com/sirupsen/logrus"
)

// MinioStorageService stores build results and published source archives in a
// MinIO (S3-compatible) bucket and can migrate that content to/from the database.
type MinioStorageService interface {
	// UploadFilesToBucket moves build results (and, unless restricted by creds,
	// published source archives) from the database into the bucket.
	UploadFilesToBucket() error
	GetFile(ctx context.Context, tableName, entityId string) ([]byte, error)
	UploadFile(ctx context.Context, tableName, entityId string, content []byte) error
	RemoveFile(ctx context.Context, tableName, entityId string) error
	RemoveFiles(ctx context.Context, tableName string, entityIds []string) error
	// DownloadFilesFromBucketToDatabase performs the reverse migration.
	DownloadFilesFromBucketToDatabase() error
}

// NewMinioStorageService creates the service and eagerly constructs the MinIO
// client from creds; a client construction error is captured in minioClient.error.
func NewMinioStorageService(buildRepository repository.BuildResultRepository, publishRepo repository.PublishedRepository, creds *view.MinioStorageCreds) MinioStorageService {
	return &minioStorageServiceImpl{
		buildRepository: buildRepository,
		minioClient:     createMinioClient(creds),
		publishRepo:     publishRepo,
		creds:           creds,
	}
}

type minioStorageServiceImpl struct {
	buildRepository repository.BuildResultRepository
	minioClient     *minioClient
	publishRepo     repository.PublishedRepository
	creds           *view.MinioStorageCreds
}

// minioClient pairs the constructed client with any construction error.
type minioClient struct {
	client *minio.Client
	error  error
}

// todo add more logs for ex - [15 / 100] entities were stored to database....
+func (m minioStorageServiceImpl) DownloadFilesFromBucketToDatabase() error { + ctx := context.Background() + buildResultFileKeys := make([]string, 0) + publishedSourceArchiveFileKeys := make([]string, 0) + foldersChan := m.minioClient.client.ListObjects(ctx, m.creds.BucketName, minio.ListObjectsOptions{}) + for folder := range foldersChan { + objectsChan := m.minioClient.client.ListObjects(ctx, m.creds.BucketName, minio.ListObjectsOptions{Prefix: folder.Key}) + switch folder.Key { + case fmt.Sprintf("%s/", view.BUILD_RESULT_TABLE): + for buildResult := range objectsChan { + buildResultFileKeys = append(buildResultFileKeys, buildResult.Key) + } + case fmt.Sprintf("%s/", view.PUBLISHED_SOURCES_ARCHIVES_TABLE): + for publishedSourceArchive := range objectsChan { + publishedSourceArchiveFileKeys = append(publishedSourceArchiveFileKeys, publishedSourceArchive.Key) + } + } + } + + log.Infof("MINIO. %d files were found", len(buildResultFileKeys)+len(publishedSourceArchiveFileKeys)) + + if len(buildResultFileKeys) > 0 { + utils.SafeAsync(func() { + entitiesCount := 0 + for _, key := range buildResultFileKeys { + buildId := getEntityId(fmt.Sprintf("%s/", view.BUILD_RESULT_TABLE), key) + if buildId == "" { + log.Errorf("unsupported file key format. folder - '%s', file - '%s'", fmt.Sprintf("%s/", view.BUILD_RESULT_TABLE), key) + continue + } + data, err := m.getFile(ctx, key) + if err != nil { + log.Errorf("failed to get file from minio by key -%s. 
Error - %s", key, err.Error()) + continue + } + err = m.buildRepository.StoreBuildResult(entity.BuildResultEntity{BuildId: buildId, Data: data}) + if err != nil { + log.Infof("%d build_result entities were stored from minio to database", entitiesCount) + log.Errorf("StoreBuildResults() produce error -%s", err.Error()) + return + } + entitiesCount++ + } + log.Infof("%d build_result entities were stored from minio to database", entitiesCount) + }) + } + + if len(publishedSourceArchiveFileKeys) > 0 { + utils.SafeAsync(func() { + entitiesCount := 0 + for _, key := range publishedSourceArchiveFileKeys { + checksum := getEntityId(fmt.Sprintf("%s/", view.PUBLISHED_SOURCES_ARCHIVES_TABLE), key) + if checksum == "" { + log.Errorf("unsupported file key format. folder - '%s', file - '%s'", fmt.Sprintf("%s/", view.PUBLISHED_SOURCES_ARCHIVES_TABLE), key) + continue + } + data, err := m.getFile(ctx, key) + if err != nil { + log.Errorf("failed to get file from minio by key -%s. Error - %s", key, err.Error()) + continue + } + err = m.publishRepo.SavePublishedSourcesArchive(&entity.PublishedSrcArchiveEntity{Checksum: checksum, Data: data}) + if err != nil { + log.Infof("%d published_sources_archives entities were stored from minio to database", entitiesCount) + log.Infof("SavePublishedSourcesArchives() produce error -%s", err.Error()) + return + } + entitiesCount++ + } + log.Infof("%d published_sources_archives entities were stored from minio to database", entitiesCount) + }) + } + + return nil +} + +func (m minioStorageServiceImpl) UploadFilesToBucket() error { + ctx := context.Background() + err := m.createBucketIfNotExists(ctx) + if err != nil { + return err + } + + log.Info("Uploading files to MINIO") + utils.SafeAsync(func() { + uploadedIds, err := m.uploadBuildResults(ctx) + if err != nil { + log.Errorf("uploadBuildResults produces an error - %s", err.Error()) + } + log.Info("Build results were uploaded to MINIO") + + if len(uploadedIds) > 0 { + err = 
m.buildRepository.DeleteBuildResults(uploadedIds) + if err != nil { + log.Errorf("DeleteBuildResults produces an error - %s", err.Error()) + } + log.Info("Build results were deleted from database") + } + }) + if !m.creds.IsOnlyForBuildResult { + utils.SafeAsync(func() { + uploadedChecksums, err := m.uploadPublishedSourcesArchives(ctx) + if err != nil { + log.Errorf("uploadPublishedSourcesArchives produces an error - %s", err.Error()) + } + log.Info("Published source archives were uploaded to MINIO") + + if len(uploadedChecksums) > 0 { + err = m.publishRepo.DeletePublishedSourcesArchives(uploadedChecksums) + if err != nil { + log.Errorf("DeletePublishedSourcesArchives produces an error - %s", err.Error()) + } + log.Info("Published source archives were deleted from database") + } + }) + } + + return nil +} + +func (m minioStorageServiceImpl) createBucketIfNotExists(ctx context.Context) error { + exists, err := bucketExists(ctx, m.minioClient.client, m.creds.BucketName) + if err != nil { + return err + } + if exists { + log.Infof(fmt.Sprintf("Minio bucket - %s exists", m.creds.BucketName)) + } else { + err = m.minioClient.client.MakeBucket(ctx, m.creds.BucketName, minio.MakeBucketOptions{}) + if err != nil { + return err + } + exists, err = bucketExists(ctx, m.minioClient.client, m.creds.BucketName) + if err != nil { + return err + } + if exists { + log.Infof(fmt.Sprintf("Minio bucket - %s was created", m.creds.BucketName)) + } + } + return nil +} + +func createMinioClient(creds *view.MinioStorageCreds) *minioClient { + client := new(minioClient) + var err error + tr, err := minio.DefaultTransport(true) + if err != nil { + log.Warnf("error creating the minio connection: error creating the default transport layer: %v", err) + client.error = err + return client + } + crt, err := os.CreateTemp("", "minio.cert") + if err != nil { + log.Warn(err.Error()) + client.error = err + return client + } + decodeSamlCert, err := base64.StdEncoding.DecodeString(creds.Crt) + if err != 
nil { + log.Warn(err.Error()) + client.error = err + return client + } + + _, err = crt.WriteString(string(decodeSamlCert)) + rootCAs := mustGetSystemCertPool() + data, err := os.ReadFile(crt.Name()) + if err == nil { + rootCAs.AppendCertsFromPEM(data) + } + tr.TLSClientConfig.RootCAs = rootCAs + + minioClient, err := minio.New(creds.Endpoint, &minio.Options{ + Creds: credentials.NewStaticV4(creds.AccessKeyId, creds.SecretAccessKey, ""), + Secure: true, + Transport: tr, + }) + if err != nil { + if strings.Contains(err.Error(), "endpoint") { + err = errors.New("invalid storage URL") + } + log.Warn(err.Error()) + client.error = err + return client + } + log.Infof("MINIO instance initialized") + client.client = minioClient + return client +} + +func (m minioStorageServiceImpl) uploadBuildResults(ctx context.Context) ([]string, error) { + offset := 0 + ids := make([]string, 0) + var buildResult *entity.BuildResultEntity + var err error + for { + buildResult, err = m.buildRepository.GetBuildResultWithOffset(offset) + if err != nil { + log.Infof("%d build_results were ulpoaded to minio storage, until got error", offset) + break + } + if buildResult == nil { + log.Infof("%d build_results were ulpoaded to minio storage, until buildResult is null", offset) + break + } + err = m.putObject(ctx, buildFileName(view.BUILD_RESULT_TABLE, buildResult.BuildId), buildResult.Data) + if err != nil { + log.Infof("%d build_results were ulpoaded to minio storage, until got error", offset) + break + } + ids = append(ids, buildResult.BuildId) + offset++ + } + return ids, err +} + +// file name table_name + checksum +func (m minioStorageServiceImpl) uploadPublishedSourcesArchives(ctx context.Context) ([]string, error) { + offset := 0 + checksums := make([]string, 0) + for { + publishedSourceArchive, err := m.publishRepo.GetPublishedSourcesArchives(offset) + if err != nil { + log.Infof("%d published_sources_archives were uploaded to minio storage, before error was received", offset) + break + 
} + if publishedSourceArchive == nil { + log.Infof("%d published_sources_archives were uploaded to minio storage, before publishedSourceArchive became null", offset) + break + } + err = m.putObject(ctx, buildFileName(view.PUBLISHED_SOURCES_ARCHIVES_TABLE, publishedSourceArchive.Checksum), publishedSourceArchive.Data) + if err != nil { + log.Infof("%d published_sources_archives were uploaded to minio storage, before error was received", offset) + break + } + checksums = append(checksums, publishedSourceArchive.Checksum) + offset++ + } + return checksums, nil +} + +func (m minioStorageServiceImpl) UploadFile(ctx context.Context, tableName, entityId string, content []byte) error { + start := time.Now() + err := m.putObject(ctx, buildFileName(tableName, entityId), content) + utils.PerfLog(time.Since(start).Milliseconds(), 500, "UploadFile: upload file to Minio") + if err != nil { + return err + } + return nil +} + +func (m minioStorageServiceImpl) putObject(ctx context.Context, fileName string, content []byte) error { + _, err := m.minioClient.client.PutObject(ctx, m.creds.BucketName, fileName, bytes.NewReader(content), int64(len(content)), minio.PutObjectOptions{}) + if err != nil { + return err + } + return nil +} + +func (m minioStorageServiceImpl) GetFile(ctx context.Context, tableName, entityId string) ([]byte, error) { + return m.getFile(ctx, buildFileName(tableName, entityId)) +} + +// fullFileName - tableName/entity_id.zip +func (m minioStorageServiceImpl) getFile(ctx context.Context, fullFileName string) ([]byte, error) { + minioObject, err := m.minioClient.client.GetObject(ctx, m.creds.BucketName, fullFileName, minio.GetObjectOptions{}) + if err != nil { + log.Warn(err) + return nil, err + } + minioObjectContent, err := io.ReadAll(minioObject) + return minioObjectContent, err +} + +func (m minioStorageServiceImpl) RemoveFile(ctx context.Context, tableName, entityId string) error { + return m.removeFile(ctx, buildFileName(tableName, entityId)) +} + +func (m 
minioStorageServiceImpl) RemoveFiles(ctx context.Context, tableName string, entityIds []string) error { + minioObjectsChan := make(chan minio.ObjectInfo, len(entityIds)) + utils.SafeAsync(func() { + for _, id := range entityIds { + minioObjectsChan <- minio.ObjectInfo{Key: buildFileName(tableName, id)} + } + defer close(minioObjectsChan) + }) + errMsg := make([]string, 0) + errChan := m.minioClient.client.RemoveObjects(ctx, m.creds.BucketName, minioObjectsChan, minio.RemoveObjectsOptions{}) + for removeError := range errChan { + errMsg = append(errMsg, removeError.Err.Error()) + } + if len(errMsg) > 0 { + return errors.New(strings.Join(errMsg, ". ")) + } + return nil +} + +func (m minioStorageServiceImpl) removeFile(ctx context.Context, fileName string) error { + err := m.minioClient.client.RemoveObject(ctx, m.creds.BucketName, fileName, minio.RemoveObjectOptions{}) + if err != nil { + return err + } + return nil +} + +func bucketExists(ctx context.Context, minioClient *minio.Client, bucketName string) (bool, error) { + exists, err := minioClient.BucketExists(ctx, bucketName) + if err != nil { + return false, err + } + return exists, nil +} +func mustGetSystemCertPool() *x509.CertPool { + pool, err := x509.SystemCertPool() + if err != nil { + return x509.NewCertPool() + } + return pool +} + +func buildFileName(tableName, entityId string) string { + return fmt.Sprintf("%s/%s.zip", tableName, entityId) +} + +func getEntityId(folderName string, fileName string) string { + if strings.Contains(fileName, folderName) && strings.Contains(fileName, ".zip") { + entityIdDotZip := strings.ReplaceAll(fileName, folderName, "") + return strings.ReplaceAll(entityIdDotZip, ".zip", "") + } + return "" +} diff --git a/qubership-apihub-service/service/MonitoringService.go b/qubership-apihub-service/service/MonitoringService.go new file mode 100644 index 0000000..0e0cd06 --- /dev/null +++ b/qubership-apihub-service/service/MonitoringService.go @@ -0,0 +1,334 @@ +// Copyright 2024-2025 
NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "context" + "crypto/sha1" + "encoding/hex" + "encoding/json" + "fmt" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/db" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/go-pg/pg/v10" + log "github.com/sirupsen/logrus" +) + +type MonitoringService interface { + AddVersionOpenCount(packageId string, version string) + AddDocumentOpenCount(packageId string, version string, slug string) + AddOperationOpenCount(packageId string, version string, operationId string) + IncreaseBusinessMetricCounter(userId string, metric string, key string) + AddEndpointCall(path string, options interface{}) +} + +func NewMonitoringService(cp db.ConnectionProvider) MonitoringService { + monitoringService := &monitoringServiceImpl{ + versionOpenCount: make(map[string]int), + documentOpenCount: make(map[string]int), + operationOpenCount: make(map[string]int), + businessMetrics: make(map[string]map[string]map[string]int), + endpointCalls: make(map[string]map[string]interface{}), + endpointCallsCount: make(map[string]map[string]int), + versionOCMutex: &sync.RWMutex{}, + documentOCMutex: &sync.RWMutex{}, + operationOCMutex: &sync.RWMutex{}, + businessMetricsMutex: &sync.RWMutex{}, + endpointCallsMutex: &sync.RWMutex{}, + cp: cp, + } + 
monitoringService.startPeriodicFlushJob(time.Minute * 5) + return monitoringService +} + +type monitoringServiceImpl struct { + cp db.ConnectionProvider + versionOpenCount map[string]int + versionOCMutex *sync.RWMutex + documentOpenCount map[string]int + documentOCMutex *sync.RWMutex + operationOpenCount map[string]int + operationOCMutex *sync.RWMutex + businessMetrics map[string]map[string]map[string]int + businessMetricsMutex *sync.RWMutex + endpointCalls map[string]map[string]interface{} + endpointCallsCount map[string]map[string]int + endpointCallsMutex *sync.RWMutex +} + +func (m *monitoringServiceImpl) AddVersionOpenCount(packageId string, version string) { + utils.SafeAsync(func() { + versionKey := getVersionKey(packageId, version) + m.versionOCMutex.Lock() + defer m.versionOCMutex.Unlock() + m.versionOpenCount[versionKey]++ + }) +} + +func getVersionKey(packageId string, version string) string { + if strings.Contains(version, "@") { + version = strings.Split(version, "@")[0] + } + return packageId + stringSeparator + version +} + +func splitVersionKey(versionKey string) (string, string) { + versionKeySplit := strings.Split(versionKey, stringSeparator) + return versionKeySplit[0], versionKeySplit[1] +} + +func (m *monitoringServiceImpl) AddDocumentOpenCount(packageId string, version string, slug string) { + utils.SafeAsync(func() { + documentKey := getDocumentKey(packageId, version, slug) + m.documentOCMutex.Lock() + defer m.documentOCMutex.Unlock() + m.documentOpenCount[documentKey]++ + }) +} + +func getDocumentKey(packageId string, version string, slug string) string { + if strings.Contains(version, "@") { + version = strings.Split(version, "@")[0] + } + return packageId + stringSeparator + version + stringSeparator + slug +} + +func splitDocumentKey(documentKey string) (string, string, string) { + documentKeySplit := strings.Split(documentKey, stringSeparator) + return documentKeySplit[0], documentKeySplit[1], documentKeySplit[2] +} + +func (m 
*monitoringServiceImpl) AddOperationOpenCount(packageId string, version string, operationId string) { + utils.SafeAsync(func() { + operationKey := getOperationKey(packageId, version, operationId) + m.operationOCMutex.Lock() + defer m.operationOCMutex.Unlock() + m.operationOpenCount[operationKey]++ + }) +} + +func getOperationKey(packageId string, version string, operationId string) string { + if strings.Contains(version, "@") { + version = strings.Split(version, "@")[0] + } + return packageId + stringSeparator + version + stringSeparator + operationId +} + +func splitOperationKey(operationKey string) (string, string, string) { + operationKeySplit := strings.Split(operationKey, stringSeparator) + return operationKeySplit[0], operationKeySplit[1], operationKeySplit[2] +} + +func (m *monitoringServiceImpl) startPeriodicFlushJob(interval time.Duration) { + utils.SafeAsync(func() { + for { + time.Sleep(interval) + err := m.flushOpenCount() + if err != nil { + log.Errorf("PeriodicFlushJob failed to flush open count to db: %v", err.Error()) + } + err = m.flushBusinessMetrics() + if err != nil { + log.Errorf("PeriodicFlushJob failed to flush business metrics to db: %v", err.Error()) + } + err = m.flushEndpointCalls() + if err != nil { + log.Errorf("PeriodicFlushJob failed to flush endpoint calls to db: %v", err.Error()) + } + } + }) +} + +func (m *monitoringServiceImpl) flushOpenCount() error { + if len(m.versionOpenCount) == 0 && len(m.documentOpenCount) == 0 && len(m.operationOpenCount) == 0 { + return nil + } + m.versionOCMutex.Lock() + m.documentOCMutex.Lock() + m.operationOCMutex.Lock() + defer m.versionOCMutex.Unlock() + defer m.documentOCMutex.Unlock() + defer m.operationOCMutex.Unlock() + + ctx := context.Background() + err := m.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + versionOpenCountInsertQuery := ` + insert into published_version_open_count as pv + values (?, ?, ?) 
+ on conflict (package_id, version) do update + set open_count = pv.open_count + ?` + for versionKey, openCount := range m.versionOpenCount { + packageId, version := splitVersionKey(versionKey) + _, err := tx.Exec(versionOpenCountInsertQuery, packageId, version, openCount, openCount) + if err != nil { + return err + } + } + documentOpenCountInsertQuery := ` + insert into published_document_open_count as pd + values (?, ?, ?, ?) + on conflict (package_id, version, slug) do update + set open_count = pd.open_count + ?` + for documentKey, openCount := range m.documentOpenCount { + packageId, version, slug := splitDocumentKey(documentKey) + _, err := tx.Exec(documentOpenCountInsertQuery, packageId, version, slug, openCount, openCount) + if err != nil { + return err + } + } + operationOpenCountInsertQuery := ` + insert into operation_open_count as o + values (?, ?, ?, ?) + on conflict (package_id, version, operation_id) do update + set open_count = o.open_count + ?` + for operationKey, openCount := range m.operationOpenCount { + packageId, version, operationId := splitOperationKey(operationKey) + _, err := tx.Exec(operationOpenCountInsertQuery, packageId, version, operationId, openCount, openCount) + if err != nil { + return err + } + } + return nil + }) + m.versionOpenCount = make(map[string]int) + m.documentOpenCount = make(map[string]int) + m.operationOpenCount = make(map[string]int) + return err +} + +func (m *monitoringServiceImpl) IncreaseBusinessMetricCounter(userId string, metric string, key string) { + utils.SafeAsync(func() { + m.businessMetricsMutex.Lock() + defer m.businessMetricsMutex.Unlock() + if _, userMetricExists := m.businessMetrics[userId]; !userMetricExists { + m.businessMetrics[userId] = map[string]map[string]int{metric: {key: 1}} + } else { + if _, metricExists := m.businessMetrics[userId][metric]; !metricExists { + m.businessMetrics[userId][metric] = map[string]int{key: 1} + } else { + if _, keyExists := m.businessMetrics[userId][metric][key]; 
keyExists { + m.businessMetrics[userId][metric][key]++ + } else { + m.businessMetrics[userId][metric][key] = 1 + } + } + } + }) +} + +func (m *monitoringServiceImpl) flushBusinessMetrics() error { + if len(m.businessMetrics) == 0 { + return nil + } + m.businessMetricsMutex.Lock() + defer m.businessMetricsMutex.Unlock() + + insertQuery := ` + insert into business_metric + values (?, ?, ?, ?, ?, ?) + on conflict (year, month, day, user_id, metric) + do update + set data = coalesce(business_metric.data, '{}') || ( + SELECT jsonb_object_agg(key, coalesce((business_metric.data ->> key)::int, 0) + coalesce(value::int, 0)) + from jsonb_each_text(EXCLUDED.data) + );` + + timeNow := time.Now() + year := timeNow.Year() + month := timeNow.Month() + day := timeNow.Day() + + ctx := context.Background() + err := m.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + for userId, metrics := range m.businessMetrics { + if len(metrics) == 0 { + continue + } + for metric, values := range metrics { + if len(values) == 0 { + continue + } + _, err := tx.Exec(insertQuery, year, month, day, metric, values, userId) + if err != nil { + return fmt.Errorf("failed to insert business metric %s: %w", metric, err) + } + } + } + return nil + }) + if err != nil { + return err + } + + m.businessMetrics = make(map[string]map[string]map[string]int) + return nil +} + +func (m *monitoringServiceImpl) AddEndpointCall(path string, options interface{}) { + utils.SafeAsync(func() { + m.endpointCallsMutex.Lock() + defer m.endpointCallsMutex.Unlock() + jsonBytes, _ := json.Marshal(options) + hashBytes := sha1.Sum(jsonBytes) + optionsHash := hex.EncodeToString(hashBytes[:]) + if _, pathExists := m.endpointCallsCount[path]; !pathExists { + m.endpointCallsCount[path] = map[string]int{optionsHash: 1} + m.endpointCalls[path] = map[string]interface{}{optionsHash: options} + } else { + m.endpointCallsCount[path][optionsHash]++ + if _, exists := m.endpointCalls[path][optionsHash]; !exists { + 
m.endpointCalls[path][optionsHash] = options + } + } + }) +} + +func (m *monitoringServiceImpl) flushEndpointCalls() error { + if len(m.endpointCalls) == 0 { + return nil + } + m.endpointCallsMutex.Lock() + defer m.endpointCallsMutex.Unlock() + + insertQuery := ` + insert into endpoint_calls as ec + values (?, ?, ?, ?) + on conflict (path, hash) do update + set count = ec.count + ?;` + + ctx := context.Background() + err := m.cp.GetConnection().RunInTransaction(ctx, func(tx *pg.Tx) error { + for path, options := range m.endpointCalls { + for hash, optionsObj := range options { + count := m.endpointCallsCount[path][hash] + _, err := tx.Exec(insertQuery, path, hash, optionsObj, count, count) + if err != nil { + return fmt.Errorf("failed to insert endpoint calls for %s with options %+v: %w", path, optionsObj, err) + } + } + } + return nil + }) + if err != nil { + return err + } + m.endpointCalls = make(map[string]map[string]interface{}) + m.endpointCallsCount = make(map[string]map[string]int) + return nil +} diff --git a/qubership-apihub-service/service/OperationGroupService.go b/qubership-apihub-service/service/OperationGroupService.go new file mode 100644 index 0000000..8871111 --- /dev/null +++ b/qubership-apihub-service/service/OperationGroupService.go @@ -0,0 +1,1078 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "net/http" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" +) + +type OperationGroupService interface { + SetBuildService(buildService BuildService) + + CreateOperationGroup_deprecated(packageId string, version string, apiType string, createReq view.CreateOperationGroupReq_deprecated) error + CreateOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, createReq view.CreateOperationGroupReq) error + ReplaceOperationGroup_deprecated(packageId string, version string, apiType string, groupName string, replaceReq view.ReplaceOperationGroupReq_deprecated) error + ReplaceOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string, replaceReq view.ReplaceOperationGroupReq) error + UpdateOperationGroup_deprecated(packageId string, version string, apiType string, groupName string, updateReq view.UpdateOperationGroupReq_deprecated) error + UpdateOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string, updateReq view.UpdateOperationGroupReq) error + DeleteOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string) error + CalculateOperationGroups(packageId string, version string, groupingPrefix string) ([]string, error) + GetGroupedOperations(packageId string, version string, apiType string, groupName string, searchReq 
view.OperationListReq) (*view.GroupedOperations, error) + CheckOperationGroupExists(packageId string, version string, apiType string, groupName string) (bool, error) + GetOperationGroupExportTemplate(packageId string, version string, apiType string, groupName string) ([]byte, string, error) + StartOperationGroupPublish(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string, req view.OperationGroupPublishReq) (string, error) + GetOperationGroupPublishStatus(publishId string) (*view.OperationGroupPublishStatusResponse, error) +} + +func NewOperationGroupService(operationRepository repository.OperationRepository, publishedRepo repository.PublishedRepository, packageVersionEnrichmentService PackageVersionEnrichmentService, activityTrackingService ActivityTrackingService) OperationGroupService { + return &operationGroupServiceImpl{ + operationRepo: operationRepository, + publishedRepo: publishedRepo, + packageVersionEnrichmentService: packageVersionEnrichmentService, + atService: activityTrackingService, + } +} + +type operationGroupServiceImpl struct { + operationRepo repository.OperationRepository + publishedRepo repository.PublishedRepository + packageVersionEnrichmentService PackageVersionEnrichmentService + atService ActivityTrackingService + buildService BuildService +} + +func (o *operationGroupServiceImpl) SetBuildService(buildService BuildService) { + o.buildService = buildService +} + +func (o operationGroupServiceImpl) CheckOperationGroupExists(packageId string, version string, apiType string, groupName string) (bool, error) { + versionEnt, err := o.publishedRepo.GetVersion(packageId, version) + if err != nil { + return false, err + } + if versionEnt == nil { + return false, nil + } + group, err := o.operationRepo.GetOperationGroup(packageId, versionEnt.Version, versionEnt.Revision, apiType, groupName) + if err != nil { + return false, err + } + if group != nil { + return true, nil + } else { + return false, nil + } +} 
+ +func (o operationGroupServiceImpl) CreateOperationGroup_deprecated(packageId string, version string, apiType string, createReq view.CreateOperationGroupReq_deprecated) error { + versionEnt, err := o.publishedRepo.GetVersion(packageId, version) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + if createReq.GroupName == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyOperationGroupName, + Message: exception.EmptyOperationGroupNameMsg, + } + } + + existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, createReq.GroupName) + if err != nil { + return err + } + if existingGroup != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.OperationGroupAlreadyExists, + Message: exception.OperationGroupAlreadyExistsMsg, + Params: map[string]interface{}{"groupName": createReq.GroupName}, + } + } + uniqueGroupId := view.MakeOperationGroupId(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, createReq.GroupName) + newGroupEntity := &entity.OperationGroupEntity{ + PackageId: versionEnt.PackageId, + Version: versionEnt.Version, + Revision: versionEnt.Revision, + ApiType: apiType, + GroupName: createReq.GroupName, + GroupId: uniqueGroupId, + Description: createReq.Description, + Autogenerated: false, + } + return o.operationRepo.CreateOperationGroup(newGroupEntity, nil) +} + +func (o operationGroupServiceImpl) CreateOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, createReq view.CreateOperationGroupReq) error { + versionEnt, err := o.publishedRepo.GetVersion(packageId, version) + if err != nil { + 
return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + if createReq.GroupName == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyOperationGroupName, + Message: exception.EmptyOperationGroupNameMsg, + } + } + + existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, createReq.GroupName) + if err != nil { + return err + } + if existingGroup != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.OperationGroupAlreadyExists, + Message: exception.OperationGroupAlreadyExistsMsg, + Params: map[string]interface{}{"groupName": createReq.GroupName}, + } + } + uniqueGroupId := view.MakeOperationGroupId(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, createReq.GroupName) + + newGroupEntity := &entity.OperationGroupEntity{ + PackageId: versionEnt.PackageId, + Version: versionEnt.Version, + Revision: versionEnt.Revision, + ApiType: apiType, + GroupName: createReq.GroupName, + GroupId: uniqueGroupId, + Description: createReq.Description, + Autogenerated: false, + } + var templateEnt *entity.OperationGroupTemplateEntity + if createReq.TemplateFilename != "" { + templateEnt = entity.MakeOperationGroupTemplateEntity(createReq.Template) + newGroupEntity.TemplateChecksum = templateEnt.Checksum + newGroupEntity.TemplateFilename = createReq.TemplateFilename + } + err = o.operationRepo.CreateOperationGroup(newGroupEntity, templateEnt) + if err != nil { + return err + } + err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(*newGroupEntity, view.OperationGroupActionCreate, ctx.GetUserId())) + if err != nil { + log.Errorf("failed to insert 
operation group history: %v", err.Error()) + } + dataMap := map[string]interface{}{} + dataMap["groupName"] = newGroupEntity.GroupName + dataMap["version"] = newGroupEntity.Version + dataMap["revision"] = newGroupEntity.Revision + dataMap["apiType"] = newGroupEntity.ApiType + o.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETCreateManualGroup, + Data: dataMap, + PackageId: newGroupEntity.PackageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + + return nil +} + +func (o operationGroupServiceImpl) ReplaceOperationGroup_deprecated(packageId string, version string, apiType string, groupName string, replaceReq view.ReplaceOperationGroupReq_deprecated) error { + versionEnt, err := o.publishedRepo.GetVersion(packageId, version) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName) + if err != nil { + return err + } + if existingGroup == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationGroupNotFound, + Message: exception.OperationGroupNotFoundMsg, + Params: map[string]interface{}{"groupName": groupName}, + } + } + if existingGroup.Autogenerated { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.OperationGroupNotModifiable, + Message: exception.OperationGroupNotModifiableMsg, + Params: map[string]interface{}{"groupName": groupName}, + } + } + if groupName != replaceReq.GroupName { + if replaceReq.GroupName == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyOperationGroupName, + Message: exception.EmptyOperationGroupNameMsg, 
+ } + } + existingNewGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, replaceReq.GroupName) + if err != nil { + return err + } + if existingNewGroup != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.OperationGroupAlreadyExists, + Message: exception.OperationGroupAlreadyExistsMsg, + Params: map[string]interface{}{"groupName": replaceReq.GroupName}, + } + } + } + + if len(replaceReq.Operations) > view.OperationGroupOperationsLimit { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GroupOperationsLimitExceeded, + Message: exception.GroupOperationsLimitExceededMsg, + Params: map[string]interface{}{"limit": view.OperationGroupOperationsLimit}, + } + } + newGroupEntity := *existingGroup + newGroupEntity.GroupName = replaceReq.GroupName + newGroupEntity.Description = replaceReq.Description + newGroupEntity.GroupId = view.MakeOperationGroupId(newGroupEntity.PackageId, newGroupEntity.Version, newGroupEntity.Revision, newGroupEntity.ApiType, newGroupEntity.GroupName) + operationEntities := make([]entity.GroupedOperationEntity, 0) + allowedVersions := make(map[string]struct{}, 0) + refs, err := o.publishedRepo.GetVersionRefsV3(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return err + } + if len(refs) > 0 { + for _, ref := range refs { + if ref.Excluded { + continue + } + allowedVersions[view.MakePackageRefKey(ref.RefPackageId, ref.RefVersion, ref.RefRevision)] = struct{}{} + } + } else { + allowedVersions[view.MakePackageRefKey(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)] = struct{}{} + } + versionMapCache := make(map[string]entity.PublishedVersionEntity, 0) + for _, operation := range replaceReq.Operations { + operationEnt := entity.GroupedOperationEntity{ + GroupId: newGroupEntity.GroupId, + OperationId: operation.OperationId, + } + if operation.PackageId == "" || 
operation.Version == "" { + operationEnt.PackageId = newGroupEntity.PackageId + operationEnt.Version = newGroupEntity.Version + operationEnt.Revision = newGroupEntity.Revision + } else { + operationVersion, operationRevision, err := repository.SplitVersionRevision(operation.Version) + if err != nil { + return err + } + //versionMapCache includes version revision so any version without specified '@revision' will not hit the cache + if versionEnt, cached := versionMapCache[view.MakePackageRefKey(operation.PackageId, operationVersion, operationRevision)]; cached { + operationEnt.PackageId = versionEnt.PackageId + operationEnt.Version = versionEnt.Version + operationEnt.Revision = versionEnt.Revision + } else { + versionEnt, err := o.publishedRepo.GetVersion(operation.PackageId, operation.Version) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": operation.Version, "packageId": operation.PackageId}, + } + } + versionKey := view.MakePackageRefKey(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if _, allowed := allowedVersions[versionKey]; !allowed { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GroupingVersionNotAllowed, + Message: exception.GroupingVersionNotAllowedMsg, + Params: map[string]interface{}{"version": operation.Version, "packageId": operation.PackageId}, + } + } + versionMapCache[versionKey] = *versionEnt + operationEnt.PackageId = versionEnt.PackageId + operationEnt.Version = versionEnt.Version + operationEnt.Revision = versionEnt.Revision + } + } + operationEntities = append(operationEntities, operationEnt) + } + return o.operationRepo.ReplaceOperationGroup(existingGroup, &newGroupEntity, operationEntities, nil) +} + +func (o operationGroupServiceImpl) ReplaceOperationGroup(ctx 
context.SecurityContext, packageId string, version string, apiType string, groupName string, replaceReq view.ReplaceOperationGroupReq) error {
	// Fully replaces a manual operation group: name, description, template and
	// the grouped operation list. Autogenerated (prefix) groups are immutable.
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return err
	}
	if versionEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return err
	}
	if existingGroup == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupNotFound,
			Message: exception.OperationGroupNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	if existingGroup.Autogenerated {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OperationGroupNotModifiable,
			Message: exception.OperationGroupNotModifiableMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	// Renaming: the new name must be non-empty and not collide with an existing group.
	if groupName != replaceReq.GroupName {
		if replaceReq.GroupName == "" {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.EmptyOperationGroupName,
				Message: exception.EmptyOperationGroupNameMsg,
			}
		}
		existingNewGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, replaceReq.GroupName)
		if err != nil {
			return err
		}
		if existingNewGroup != nil {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.OperationGroupAlreadyExists,
				Message: exception.OperationGroupAlreadyExistsMsg,
				Params:  map[string]interface{}{"groupName": replaceReq.GroupName},
			}
		}
	}

	// Build the replacement entity from the existing one; GroupId is derived
	// from package/version/revision/apiType/name, so it changes on rename.
	newGroupEntity := *existingGroup
	newGroupEntity.GroupName = replaceReq.GroupName
	newGroupEntity.Description = replaceReq.Description
	var templateEnt *entity.OperationGroupTemplateEntity
	newGroupEntity.TemplateFilename = replaceReq.TemplateFilename
	if replaceReq.TemplateFilename != "" {
		templateEnt = entity.MakeOperationGroupTemplateEntity(replaceReq.Template)
		newGroupEntity.TemplateChecksum = templateEnt.Checksum
	} else {
		// No template filename means the template is cleared.
		newGroupEntity.TemplateChecksum = ""
	}
	newGroupEntity.GroupId = view.MakeOperationGroupId(newGroupEntity.PackageId, newGroupEntity.Version, newGroupEntity.Revision, newGroupEntity.ApiType, newGroupEntity.GroupName)
	groupedOperationEntities, err := o.makeGroupedOperationEntities(versionEnt, &newGroupEntity, replaceReq.Operations)
	if err != nil {
		return err
	}
	err = o.operationRepo.ReplaceOperationGroup(existingGroup, &newGroupEntity, groupedOperationEntities, templateEnt)
	if err != nil {
		return err
	}
	// Transformed (export) documents are keyed by group id and are stale after replace.
	err = o.clearOperationGroupCache(packageId, versionEnt.Version, versionEnt.Revision, apiType, existingGroup.GroupId)
	if err != nil {
		return err
	}

	// History/audit: a rename is recorded as delete(old)+create(new); otherwise a single update.
	// History insert failures are logged but do not fail the request.
	groupParameters := make([]string, 0)
	if existingGroup.GroupId != newGroupEntity.GroupId {
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(*existingGroup, view.OperationGroupActionDelete, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(newGroupEntity, view.OperationGroupActionCreate, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
		groupParameters = append(groupParameters, "name")
	} else {
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(newGroupEntity, view.OperationGroupActionUpdate, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
	}
	if existingGroup.Description != newGroupEntity.Description {
		groupParameters = append(groupParameters, "description")
	}
	if existingGroup.TemplateChecksum != newGroupEntity.TemplateChecksum {
		groupParameters = append(groupParameters, "template")
	}
	if replaceReq.Operations != nil {
		groupParameters = append(groupParameters, "operations")
	}
	// Activity tracking: record which group parameters were touched.
	dataMap := map[string]interface{}{}
	dataMap["groupName"] = newGroupEntity.GroupName
	dataMap["version"] = newGroupEntity.Version
	dataMap["revision"] = newGroupEntity.Revision
	dataMap["apiType"] = newGroupEntity.ApiType
	dataMap["isPrefixGroup"] = newGroupEntity.Autogenerated
	dataMap["groupParameters"] = groupParameters
	o.atService.TrackEvent(view.ActivityTrackingEvent{
		Type:      view.ATETOperationsGroupParameters,
		Data:      dataMap,
		PackageId: newGroupEntity.PackageId,
		Date:      time.Now(),
		UserId:    ctx.GetUserId(),
	})
	return nil
}

// makeGroupedOperationEntities validates and resolves the requested operations into
// grouped-operation entities for the given group. Operations referencing other
// packages are only allowed if that package version is referenced (not excluded)
// by the dashboard version being grouped.
func (o operationGroupServiceImpl) makeGroupedOperationEntities(versionEnt *entity.PublishedVersionEntity, groupEntity *entity.OperationGroupEntity, operations []view.GroupOperations) ([]entity.GroupedOperationEntity, error) {
	if len(operations) > view.OperationGroupOperationsLimit {
		return nil, &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.GroupOperationsLimitExceeded,
			Message: exception.GroupOperationsLimitExceededMsg,
			Params:  map[string]interface{}{"limit": view.OperationGroupOperationsLimit},
		}
	}
	operationEntities := make([]entity.GroupedOperationEntity, 0)
	// allowedVersions: set of package@version@revision keys operations may come from.
	allowedVersions := make(map[string]struct{}, 0)
	refs, err := o.publishedRepo.GetVersionRefsV3(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
	if err != nil {
		return nil, err
	}
	if len(refs) > 0 {
		// Dashboard case: any non-excluded referenced version is allowed.
		for _, ref := range refs {
			if ref.Excluded {
				continue
			}
			allowedVersions[view.MakePackageRefKey(ref.RefPackageId, ref.RefVersion, ref.RefRevision)] = struct{}{}
		}
	} else {
		// Plain package case: only the version itself is allowed.
		allowedVersions[view.MakePackageRefKey(versionEnt.PackageId,
versionEnt.Version, versionEnt.Revision)] = struct{}{}
	}
	// Cache of resolved package versions to avoid repeated GetVersion lookups.
	versionMapCache := make(map[string]entity.PublishedVersionEntity, 0)
	for _, operation := range operations {
		operationEnt := entity.GroupedOperationEntity{
			GroupId:     groupEntity.GroupId,
			OperationId: operation.OperationId,
		}
		if operation.PackageId == "" || operation.Version == "" {
			// No explicit source package/version: the operation belongs to the group's own version.
			operationEnt.PackageId = groupEntity.PackageId
			operationEnt.Version = groupEntity.Version
			operationEnt.Revision = groupEntity.Revision
		} else {
			operationVersion, operationRevision, err := repository.SplitVersionRevision(operation.Version)
			if err != nil {
				return nil, err
			}
			//versionMapCache includes version revision so any version without specified '@revision' will not hit the cache
			if versionEnt, cached := versionMapCache[view.MakePackageRefKey(operation.PackageId, operationVersion, operationRevision)]; cached {
				operationEnt.PackageId = versionEnt.PackageId
				operationEnt.Version = versionEnt.Version
				operationEnt.Revision = versionEnt.Revision
			} else {
				versionEnt, err := o.publishedRepo.GetVersion(operation.PackageId, operation.Version)
				if err != nil {
					return nil, err
				}
				if versionEnt == nil {
					return nil, &exception.CustomError{
						Status:  http.StatusNotFound,
						Code:    exception.PublishedPackageVersionNotFound,
						Message: exception.PublishedPackageVersionNotFoundMsg,
						Params:  map[string]interface{}{"version": operation.Version, "packageId": operation.PackageId},
					}
				}
				versionKey := view.MakePackageRefKey(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
				// Reject operations from versions not referenced by the grouped version.
				if _, allowed := allowedVersions[versionKey]; !allowed {
					return nil, &exception.CustomError{
						Status:  http.StatusBadRequest,
						Code:    exception.GroupingVersionNotAllowed,
						Message: exception.GroupingVersionNotAllowedMsg,
						Params:  map[string]interface{}{"version": operation.Version, "packageId": operation.PackageId},
					}
				}
				versionMapCache[versionKey] = *versionEnt
				operationEnt.PackageId = versionEnt.PackageId
				operationEnt.Version = versionEnt.Version
				operationEnt.Revision = versionEnt.Revision
			}
		}
		operationEntities = append(operationEntities, operationEnt)
	}
	return operationEntities, nil
}

// UpdateOperationGroup_deprecated partially updates a manual group's name and/or
// description only (legacy request shape; no template or operation-list support).
// Deprecated: superseded by UpdateOperationGroup.
func (o operationGroupServiceImpl) UpdateOperationGroup_deprecated(packageId string, version string, apiType string, groupName string, updateReq view.UpdateOperationGroupReq_deprecated) error {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return err
	}
	if versionEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return err
	}
	if existingGroup == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupNotFound,
			Message: exception.OperationGroupNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	if existingGroup.Autogenerated {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OperationGroupNotModifiable,
			Message: exception.OperationGroupNotModifiableMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	// Nothing requested: no-op.
	if updateReq.GroupName == nil && updateReq.Description == nil {
		return nil
	}
	updatedGroup := *existingGroup
	if updateReq.GroupName != nil && *updateReq.GroupName != existingGroup.GroupName {
		if *updateReq.GroupName == "" {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.EmptyOperationGroupName,
				Message: exception.EmptyOperationGroupNameMsg,
			}
		}
		existingNewGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version,
versionEnt.Revision, apiType, *updateReq.GroupName)
		if err != nil {
			return err
		}
		if existingNewGroup != nil {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.OperationGroupAlreadyExists,
				Message: exception.OperationGroupAlreadyExistsMsg,
				Params:  map[string]interface{}{"groupName": *updateReq.GroupName},
			}
		}

		// GroupId is derived from the name, so a rename produces a new id.
		updatedGroup.GroupName = *updateReq.GroupName
		updatedGroup.GroupId = view.MakeOperationGroupId(updatedGroup.PackageId, updatedGroup.Version, updatedGroup.Revision, updatedGroup.ApiType, *updateReq.GroupName)
	}
	if updateReq.Description != nil && *updateReq.Description != existingGroup.Description {
		updatedGroup.Description = *updateReq.Description
	}
	return o.operationRepo.UpdateOperationGroup(existingGroup, &updatedGroup, nil, nil)
}

// UpdateOperationGroup partially updates a group's name, description, export
// template and/or operation list; nil request fields are left unchanged.
// Autogenerated groups may only have description/template updated.
func (o operationGroupServiceImpl) UpdateOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string, updateReq view.UpdateOperationGroupReq) error {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return err
	}
	if versionEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return err
	}
	if existingGroup == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupNotFound,
			Message: exception.OperationGroupNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	// Autogenerated groups are modifiable only when the request is limited
	// to description and/or template.
	if existingGroup.Autogenerated && updateReq.Description == nil && updateReq.Template == nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OperationGroupNotModifiable,
			Message: exception.OperationGroupNotModifiableMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	// Nothing requested: no-op.
	if updateReq.GroupName == nil && updateReq.Description == nil && updateReq.Template == nil && updateReq.Operations == nil {
		return nil
	}
	updatedGroup := *existingGroup
	if updateReq.GroupName != nil && *updateReq.GroupName != existingGroup.GroupName {
		if *updateReq.GroupName == "" {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.EmptyOperationGroupName,
				Message: exception.EmptyOperationGroupNameMsg,
			}
		}
		existingNewGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, *updateReq.GroupName)
		if err != nil {
			return err
		}
		if existingNewGroup != nil {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.OperationGroupAlreadyExists,
				Message: exception.OperationGroupAlreadyExistsMsg,
				Params:  map[string]interface{}{"groupName": *updateReq.GroupName},
			}
		}

		updatedGroup.GroupName = *updateReq.GroupName
		updatedGroup.GroupId = view.MakeOperationGroupId(updatedGroup.PackageId, updatedGroup.Version, updatedGroup.Revision, updatedGroup.ApiType, *updateReq.GroupName)
	}
	if updateReq.Description != nil && *updateReq.Description != existingGroup.Description {
		updatedGroup.Description = *updateReq.Description
	}
	var templateEnt *entity.OperationGroupTemplateEntity
	if updateReq.Template != nil {
		updatedGroup.TemplateFilename = updateReq.Template.TemplateFilename
		if updateReq.Template.TemplateFilename != "" {
			templateEnt = entity.MakeOperationGroupTemplateEntity(updateReq.Template.TemplateData)
			updatedGroup.TemplateChecksum = templateEnt.Checksum
		} else {
			// Empty filename clears the template.
			updatedGroup.TemplateChecksum = ""
		}
	}
	// nil means "keep existing grouped operations"; a pointer to a (possibly
	// empty) slice means "replace them".
	var newGroupedOperationEntities *[]entity.GroupedOperationEntity
	if updateReq.Operations != nil {
		groupedOperationEntities, err := o.makeGroupedOperationEntities(versionEnt, &updatedGroup, *updateReq.Operations)
		if err != nil {
			return err
		}
		newGroupedOperationEntities = &groupedOperationEntities
	}

	err = o.operationRepo.UpdateOperationGroup(existingGroup, &updatedGroup, templateEnt, newGroupedOperationEntities)
	if err != nil {
		return err
	}
	// Transformed (export) documents keyed by the old group id are now stale.
	err = o.clearOperationGroupCache(packageId, versionEnt.Version, versionEnt.Revision, apiType, existingGroup.GroupId)
	if err != nil {
		return err
	}

	// History/audit: rename is recorded as delete(old)+create(new); otherwise update.
	// History insert failures are logged but do not fail the request.
	groupParameters := make([]string, 0)
	if existingGroup.GroupId != updatedGroup.GroupId {
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(*existingGroup, view.OperationGroupActionDelete, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(updatedGroup, view.OperationGroupActionCreate, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
		groupParameters = append(groupParameters, "name")
	} else {
		err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(updatedGroup, view.OperationGroupActionUpdate, ctx.GetUserId()))
		if err != nil {
			log.Errorf("failed to insert operation group history: %v", err.Error())
		}
	}
	if existingGroup.Description != updatedGroup.Description {
		groupParameters = append(groupParameters, "description")
	}
	if existingGroup.TemplateChecksum != updatedGroup.TemplateChecksum {
		groupParameters = append(groupParameters, "template")
	}
	if updateReq.Operations != nil {
		groupParameters = append(groupParameters, "operations")
	}
	// Activity tracking: record which group parameters were touched.
	dataMap := map[string]interface{}{}
	dataMap["groupName"] = updatedGroup.GroupName
	dataMap["version"] = updatedGroup.Version
	dataMap["revision"] = updatedGroup.Revision
	dataMap["apiType"] = updatedGroup.ApiType
	dataMap["isPrefixGroup"] = updatedGroup.Autogenerated
	dataMap["groupParameters"] = groupParameters
	o.atService.TrackEvent(view.ActivityTrackingEvent{
		Type:      view.ATETOperationsGroupParameters,
		Data:      dataMap,
		PackageId: updatedGroup.PackageId,
		Date:      time.Now(),
		UserId:    ctx.GetUserId(),
	})
	return nil
}

// DeleteOperationGroup deletes a manual operation group, clears its cached
// transformed documents, records history and emits an activity-tracking event.
// Autogenerated (prefix) groups cannot be deleted.
func (o operationGroupServiceImpl) DeleteOperationGroup(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string) error {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return err
	}
	if versionEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return err
	}
	if existingGroup == nil {
		return &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupNotFound,
			Message: exception.OperationGroupNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	if existingGroup.Autogenerated {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.OperationGroupNotModifiable,
			Message: exception.OperationGroupNotModifiableMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	err = o.operationRepo.DeleteOperationGroup(existingGroup)
	if err != nil {
		return err
	}
	err = o.clearOperationGroupCache(packageId, versionEnt.Version, versionEnt.Revision, apiType, existingGroup.GroupId)
	if err != nil {
		return err
	}
	err = o.operationRepo.AddOperationGroupHistory(entity.MakeOperationGroupHistoryEntity(*existingGroup, view.OperationGroupActionDelete, ctx.GetUserId()))
	if err != nil {
		log.Errorf("failed to insert operation
group history: %v", err.Error())
	}
	// Activity tracking for the deletion.
	dataMap := map[string]interface{}{}
	dataMap["groupName"] = existingGroup.GroupName
	dataMap["version"] = existingGroup.Version
	dataMap["revision"] = existingGroup.Revision
	dataMap["apiType"] = existingGroup.ApiType
	o.atService.TrackEvent(view.ActivityTrackingEvent{
		Type:      view.ATETDeleteManualGroup,
		Data:      dataMap,
		PackageId: existingGroup.PackageId,
		Date:      time.Now(),
		UserId:    ctx.GetUserId(),
	})

	return nil
}

// CalculateOperationGroups returns the group names computed for the given
// version using the provided grouping prefix.
func (o operationGroupServiceImpl) CalculateOperationGroups(packageId string, version string, groupingPrefix string) ([]string, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	operationGroups, err := o.operationRepo.CalculateOperationGroups(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, groupingPrefix)
	if err != nil {
		return nil, err
	}
	return operationGroups, nil
}

// GetGroupedOperations lists the operations of a group, enriched with the
// package/version refs they belong to. The refPackageId filter is only valid
// for dashboard packages.
func (o operationGroupServiceImpl) GetGroupedOperations(packageId string, version string, apiType string, groupName string, searchReq view.OperationListReq) (*view.GroupedOperations, error) {
	if searchReq.RefPackageId != "" {
		packageEnt, err := o.publishedRepo.GetPackage(packageId)
		if err != nil {
			return nil, err
		}
		if packageEnt == nil {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.PackageNotFound,
				Message: exception.PackageNotFoundMsg,
				Params:  map[string]interface{}{"packageId": packageId},
			}
		}
		if packageEnt.Kind != entity.KIND_DASHBOARD {
			return nil, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.UnsupportedQueryParam,
				Message: exception.UnsupportedQueryParamMsg,
				Params:  map[string]interface{}{"param": "refPackageId"}}
		}
	}
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	existingGroup, err := o.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return nil, err
	}
	if existingGroup == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupNotFound,
			Message: exception.OperationGroupNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	// "all" is the wildcard kind; the repository treats empty as no filter.
	if searchReq.Kind == "all" {
		searchReq.Kind = ""
	}
	operationEnts, err := o.operationRepo.GetGroupedOperations(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName, searchReq)
	if err != nil {
		return nil, err
	}
	operationList := make([]interface{}, 0)
	// packageVersions collects version@revision keys per package for ref enrichment.
	packageVersions := make(map[string][]string, 0)
	for _, ent := range operationEnts {
		operationList = append(operationList, entity.MakeOperationView(ent))
		packageVersions[ent.PackageId] = append(packageVersions[ent.PackageId], view.MakeVersionRefKey(ent.Version, ent.Revision))
	}
	packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
	if err != nil {
		return nil, err
	}
	operations := view.GroupedOperations{
		Operations: operationList,
		Packages:   packagesRefs,
	}
	return &operations, nil
}

// clearOperationGroupCache drops cached transformed documents for the group.
func (o operationGroupServiceImpl) clearOperationGroupCache(packageId string, version string, revision int, apiType string, groupId string) error {
	return o.publishedRepo.DeleteTransformedDocuments(packageId, version, revision, apiType, groupId)
}

// GetOperationGroupExportTemplate returns the stored export template content and
// its filename for the given group; 404 when no template is configured.
func (o operationGroupServiceImpl) GetOperationGroupExportTemplate(packageId string, version string, apiType string, groupName string) ([]byte, string, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, "", err
	}
	if versionEnt == nil {
		return nil, "", &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	operationsGroupTemplate, err := o.operationRepo.GetOperationGroupTemplateFile(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, groupName)
	if err != nil {
		return nil, "", err
	}
	if operationsGroupTemplate == nil || operationsGroupTemplate.TemplateFilename == "" {
		return nil, "", &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationGroupExportTemplateNotFound,
			Message: exception.OperationGroupExportTemplateNotFoundMsg,
			Params:  map[string]interface{}{"groupName": groupName},
		}
	}
	return operationsGroupTemplate.Template, operationsGroupTemplate.TemplateFilename, nil
}

// StartOperationGroupPublish starts an asynchronous publish of a group as a new
// package version and returns the publish process id for status polling.
func (o operationGroupServiceImpl) StartOperationGroupPublish(ctx context.SecurityContext, packageId string, version string, apiType string, groupName string, req view.OperationGroupPublishReq) (string, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return "", err
	}
	if versionEnt == nil {
		return "", &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedVersionNotFound,
			Message: exception.PublishedVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version},
		}
	}
	exists, err := o.CheckOperationGroupExists(packageId, version, apiType, groupName)
	if err != nil {
		return "", err
	}
	if !exists {
		return "", &exception.CustomError{
+ Status: http.StatusNotFound, + Code: exception.OperationGroupNotFound, + Message: exception.OperationGroupNotFoundMsg, + Params: map[string]interface{}{"groupName": groupName}, + } + } + + publishId := uuid.NewString() + operationGroupPublishEnt := &entity.OperationGroupPublishEntity{ + PublishId: publishId, + Status: string(view.StatusRunning), + } + err = o.publishedRepo.StoreOperationGroupPublishProcess(operationGroupPublishEnt) + if err != nil { + return "", fmt.Errorf("failed to create operation group publish process: %w", err) + } + utils.SafeAsync(func() { + o.publishOperationGroup(ctx, versionEnt, apiType, groupName, req, operationGroupPublishEnt) + }) + return publishId, nil +} + +func (o operationGroupServiceImpl) publishOperationGroup(ctx context.SecurityContext, version *entity.PublishedVersionEntity, apiType string, groupName string, req view.OperationGroupPublishReq, publishEnt *entity.OperationGroupPublishEntity) { + groupId := view.MakeOperationGroupId(version.PackageId, version.Version, version.Revision, apiType, groupName) + transformedDocuments, err := o.publishedRepo.GetTransformedDocuments(version.PackageId, view.MakeVersionRefKey(version.Version, version.Revision), apiType, groupId, view.ReducedSourceSpecificationsType, string(view.JsonDocumentFormat)) + if err != nil { + o.updatePublishProcess(publishEnt, string(view.StatusError), fmt.Sprintf("faield to get existing transformed documents: %v", err.Error())) + return + } + if transformedDocuments == nil { + err = o.transformDocuments(ctx, version, apiType, groupName) + if err != nil { + o.updatePublishProcess(publishEnt, string(view.StatusError), fmt.Sprintf("faield to tranform group operations into documents: %v", err.Error())) + return + } + transformedDocuments, err = o.publishedRepo.GetTransformedDocuments(version.PackageId, view.MakeVersionRefKey(version.Version, version.Revision), apiType, groupId, view.ReducedSourceSpecificationsType, string(view.JsonDocumentFormat)) + if err != nil { 
+ o.updatePublishProcess(publishEnt, string(view.StatusError), fmt.Sprintf("faield to get transformed documents: %v", err.Error())) + return + } + if transformedDocuments == nil { + o.updatePublishProcess(publishEnt, string(view.StatusError), "faield to get transformed documents: transformed documents not found") + return + } + } + files := make([]view.BCFile, 0) + publishFile := true + for _, document := range transformedDocuments.DocumentsInfo { + files = append(files, view.BCFile{ + FileId: document.Filename, + Publish: &publishFile, + }) + } + groupPublishBuildConfig := view.BuildConfig{ + PackageId: req.PackageId, + Version: req.Version, + BuildType: view.BuildType, + PreviousVersion: req.PreviousVersion, + PreviousVersionPackageId: req.PreviousVersionPackageId, + Status: req.Status, + Files: files, + CreatedBy: ctx.GetUserId(), + Metadata: view.BuildConfigMetadata{ + VersionLabels: req.VersionLabels, + }, + } + build, err := o.buildService.PublishVersion(ctx, groupPublishBuildConfig, transformedDocuments.Data, false, "", nil, false, false) + if err != nil { + o.updatePublishProcess(publishEnt, string(view.StatusError), fmt.Sprintf("faield to start operation group publish: %v", err.Error())) + return + } + err = o.buildService.AwaitBuildCompletion(build.PublishId) + if err != nil { + o.updatePublishProcess(publishEnt, string(view.StatusError), fmt.Sprintf("faield to publish operation group: %v", err.Error())) + return + } + o.updatePublishProcess(publishEnt, string(view.StatusComplete), "") +} + +func (o operationGroupServiceImpl) transformDocuments(ctx context.SecurityContext, version *entity.PublishedVersionEntity, apiType string, groupName string) error { + buildId, err := o.buildService.CreateBuildWithoutDependencies(view.BuildConfig{ + PackageId: version.PackageId, + Version: view.MakeVersionRefKey(version.Version, version.Revision), + BuildType: view.ReducedSourceSpecificationsType, + Format: string(view.JsonDocumentFormat), + CreatedBy: ctx.GetUserId(), 
+ ApiType: apiType, + GroupName: groupName, + }, false, "") + if err != nil { + return fmt.Errorf("failed to create documents transformation build: %v", err.Error()) + } + err = o.buildService.AwaitBuildCompletion(buildId) + if err != nil { + return fmt.Errorf("documents transformation build failed: %v", err.Error()) + } + return nil +} + +func (o operationGroupServiceImpl) updatePublishProcess(publishEnt *entity.OperationGroupPublishEntity, status string, details string) { + publishEnt.Status = status + publishEnt.Details = details + err := o.publishedRepo.UpdateOperationGroupPublishProcess(publishEnt) + if err != nil { + log.Errorf("failed to update operation group publish process: %v", err.Error()) + } +} + +func (o operationGroupServiceImpl) GetOperationGroupPublishStatus(publishId string) (*view.OperationGroupPublishStatusResponse, error) { + publishProcess, err := o.publishedRepo.GetOperationGroupPublishProcess(publishId) + if err != nil { + return nil, err + } + if publishProcess == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishProcessNotFound, + Message: exception.PublishProcessNotFoundMsg, + Params: map[string]interface{}{"publishId": publishId}, + } + } + return &view.OperationGroupPublishStatusResponse{ + Status: publishProcess.Status, + Message: publishProcess.Details, + }, nil +} diff --git a/qubership-apihub-service/service/OperationService.go b/qubership-apihub-service/service/OperationService.go new file mode 100644 index 0000000..288d8e1 --- /dev/null +++ b/qubership-apihub-service/service/OperationService.go @@ -0,0 +1,923 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "net/http" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type OperationService interface { + GetOperations(packageId string, version string, skipRefs bool, searchReq view.OperationListReq) (*view.Operations, error) + GetOperation(searchReq view.OperationBasicSearchReq) (interface{}, error) + GetOperationsTags(searchReq view.OperationBasicSearchReq, skipRefs bool) (*view.OperationTags, error) + GetOperationChanges(packageId string, version string, operationId string, previousPackageId string, previousVersion string, severities []string) (*view.OperationChangesView, error) + GetVersionChanges_deprecated(packageId string, version string, apiType string, searchReq view.VersionChangesReq) (*view.VersionChangesView, error) + GetVersionChanges(packageId string, version string, apiType string, searchReq view.VersionChangesReq) (*view.VersionChangesView, error) + SearchForOperations_deprecated(searchReq view.SearchQueryReq) (*view.SearchResult_deprecated, error) + SearchForOperations(searchReq view.SearchQueryReq) (*view.SearchResult, error) + GetDeprecatedOperations(packageId string, version string, searchReq 
view.DeprecatedOperationListReq) (*view.Operations, error)
	GetOperationDeprecatedItems(searchReq view.OperationBasicSearchReq) (*view.DeprecatedItems, error)
	GetDeprecatedOperationsSummary(packageId string, version string) (*view.DeprecatedOperationsSummary, error)
	GetOperationModelUsages(packageId string, version string, apiType string, operationId string, modelName string) (*view.OperationModelUsages, error)
}

// NewOperationService wires an OperationService over the operation and
// published-version repositories plus the package-version enrichment service.
func NewOperationService(
	operationRepository repository.OperationRepository,
	publishedRepo repository.PublishedRepository,
	packageVersionEnrichmentService PackageVersionEnrichmentService) OperationService {
	return &operationServiceImpl{
		operationRepository:             operationRepository,
		publishedRepo:                   publishedRepo,
		packageVersionEnrichmentService: packageVersionEnrichmentService,
	}
}

type operationServiceImpl struct {
	operationRepository             repository.OperationRepository
	publishedRepo                   repository.PublishedRepository
	packageVersionEnrichmentService PackageVersionEnrichmentService
}

// GetDeprecatedOperationsSummary returns per-API-type deprecation counts for a
// version: OperationTypes for plain packages, Refs+Packages for dashboards
// (grouped per referenced package version).
func (o operationServiceImpl) GetDeprecatedOperationsSummary(packageId string, version string) (*view.DeprecatedOperationsSummary, error) {
	packageEnt, err := o.publishedRepo.GetPackage(packageId)
	if err != nil {
		return nil, err
	}
	if packageEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PackageNotFound,
			Message: exception.PackageNotFoundMsg,
			Params:  map[string]interface{}{"packageId": packageId},
		}
	}
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	result := new(view.DeprecatedOperationsSummary)

	if packageEnt.Kind == entity.KIND_PACKAGE {
		deprecatedOperationsSummaryEnts, err := o.operationRepository.GetDeprecatedOperationsSummary(packageId, versionEnt.Version, versionEnt.Revision)
		if err != nil {
			return nil, err
		}
		deprecatedOperationTypes := make([]view.DeprecatedOperationType, 0)
		for _, ent := range deprecatedOperationsSummaryEnts {
			deprecatedOperationTypes = append(deprecatedOperationTypes, entity.MakeDeprecatedOperationType(ent))
		}
		result.OperationTypes = &deprecatedOperationTypes
	}
	if packageEnt.Kind == entity.KIND_DASHBOARD {
		deprecatedOperationsRefsSummaryEnts, err := o.operationRepository.GetDeprecatedOperationsRefsSummary(packageId, versionEnt.Version, versionEnt.Revision)
		if err != nil {
			return nil, err
		}

		// Group summary rows by referenced package@version@revision and collect
		// the version keys needed for ref enrichment.
		deprecatedOperationTypesMap := make(map[string][]entity.DeprecatedOperationsSummaryEntity)
		packageVersions := make(map[string][]string)
		for _, ent := range deprecatedOperationsRefsSummaryEnts {
			packageRefKey := view.MakePackageRefKey(ent.PackageId, ent.Version, ent.Revision)
			if deprecatedOperationTypesMap[packageRefKey] == nil {
				deprecatedOperationTypesMap[packageRefKey] = make([]entity.DeprecatedOperationsSummaryEntity, 0)
			}
			deprecatedOperationTypesMap[packageRefKey] = append(deprecatedOperationTypesMap[packageRefKey], ent)
			packageVersions[ent.PackageId] = append(packageVersions[ent.PackageId], view.MakeVersionRefKey(ent.Version, ent.Revision))
		}

		deprecatedOperationTypesRef := make([]view.DeprecatedOperationTypesRef, 0)
		for packageRefKey, operationTypes := range deprecatedOperationTypesMap {
			deprecatedOperationTypesRef = append(deprecatedOperationTypesRef, entity.MakeDeprecatedOperationTypesRef(packageRefKey, operationTypes))
		}
		packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
		if err != nil {
			return nil, err
		}
		result.Refs = &deprecatedOperationTypesRef
		result.Packages = &packagesRefs
	}

	return result, nil
}

func (o operationServiceImpl)
// GetDeprecatedOperations lists the deprecated operations of a package version,
// filtered by searchReq. The refPackageId filter is only meaningful for
// dashboards (which aggregate operations from referenced packages) and is
// rejected with 400 for any other package kind.
func (o operationServiceImpl) GetDeprecatedOperations(packageId string, version string, searchReq view.DeprecatedOperationListReq) (*view.Operations, error) {
	if searchReq.RefPackageId != "" {
		packageEnt, err := o.publishedRepo.GetPackage(packageId)
		if err != nil {
			return nil, err
		}
		if packageEnt == nil {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.PackageNotFound,
				Message: exception.PackageNotFoundMsg,
				Params:  map[string]interface{}{"packageId": packageId},
			}
		}
		if packageEnt.Kind != entity.KIND_DASHBOARD {
			return nil, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.UnsupportedQueryParam,
				Message: exception.UnsupportedQueryParamMsg,
				Params:  map[string]interface{}{"param": "refPackageId"}}
		}
	}
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	// "all" is the public wildcard; an empty kind means "no filter" downstream.
	if searchReq.Kind == "all" {
		searchReq.Kind = ""
	}
	deprecatedOperationEnts, err := o.operationRepository.GetDeprecatedOperations(packageId, versionEnt.Version, versionEnt.Revision, searchReq.ApiType, searchReq)
	if err != nil {
		return nil, err
	}
	deprecatedOperationList := make([]interface{}, 0)
	packageVersions := make(map[string][]string)
	for _, ent := range deprecatedOperationEnts {
		deprecatedOperationList = append(deprecatedOperationList, entity.MakeDeprecatedOperationView(ent, searchReq.IncludeDeprecatedItems))
		// NOTE(review): sibling methods build this key via view.MakeVersionRefKey;
		// the inline "%v@%v" format looks equivalent but should be confirmed and
		// unified if so.
		packageVersions[ent.PackageId] = append(packageVersions[ent.PackageId], fmt.Sprintf("%v@%v", ent.Version, ent.Revision))
	}
	packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
	if err != nil {
		return nil, err
	}
	operations := view.Operations{
		Operations: deprecatedOperationList,
		Packages:   packagesRefs,
	}
	return &operations, nil
}

// GetOperations lists the operations of a package version. skipRefs excludes
// operations coming from referenced packages; the refPackageId filter is
// dashboard-only, as in GetDeprecatedOperations. A "key: value" text filter is
// additionally parsed into a custom-tag key/value pair for the repository query.
func (o operationServiceImpl) GetOperations(packageId string, version string, skipRefs bool, searchReq view.OperationListReq) (*view.Operations, error) {
	if searchReq.RefPackageId != "" {
		packageEnt, err := o.publishedRepo.GetPackage(packageId)
		if err != nil {
			return nil, err
		}
		if packageEnt == nil {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.PackageNotFound,
				Message: exception.PackageNotFoundMsg,
				Params:  map[string]interface{}{"packageId": packageId},
			}
		}
		if packageEnt.Kind != entity.KIND_DASHBOARD {
			return nil, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.UnsupportedQueryParam,
				Message: exception.UnsupportedQueryParamMsg,
				Params:  map[string]interface{}{"param": "refPackageId"}}
		}
	}
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	// "all" is the public wildcard; an empty kind means "no filter" downstream.
	if searchReq.Kind == "all" {
		searchReq.Kind = ""
	}

	searchReq.CustomTagKey, searchReq.CustomTagValue, err = parseTextFilterToCustomTagKeyValue(searchReq.TextFilter)
	if err != nil {
		return nil, err
	}
	operationEnts, err := o.operationRepository.GetOperations(packageId, versionEnt.Version, versionEnt.Revision, searchReq.ApiType, skipRefs, searchReq)
	if err != nil {
		return nil, err
	}
	operationList := make([]interface{}, 0)
	packageVersions := make(map[string][]string, 0)
	for _, ent := range operationEnts {
		operationList = append(operationList, entity.MakeOperationView(ent))
		packageVersions[ent.PackageId] = append(packageVersions[ent.PackageId], view.MakeVersionRefKey(ent.Version, ent.Revision))
	}
	packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
	if err != nil {
		return nil, err
	}
	operations := view.Operations{
		Operations: operationList,
		Packages:   packagesRefs,
	}
	return &operations, nil
}
view.MakeVersionRefKey(ent.Version, ent.Revision)) + } + packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + if err != nil { + return nil, err + } + operations := view.Operations{ + Operations: operationList, + Packages: packagesRefs, + } + return &operations, nil +} + +func parseTextFilterToCustomTagKeyValue(textFilter string) (string, string, error) { + if strings.Contains(textFilter, ": ") { + if len(strings.Split(textFilter, ": ")) != 2 { + return "", "", &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidTextFilterFormatForOperationCustomTag, + Message: exception.InvalidTextFilterFormatForOperationCustomTagMsg, + Params: map[string]interface{}{"textFilter": textFilter}, + } + } + return strings.Split(textFilter, ": ")[0], strings.Split(textFilter, ": ")[1], nil + } + return "", "", nil +} + +func (o operationServiceImpl) GetOperation(searchReq view.OperationBasicSearchReq) (interface{}, error) { + versionEnt, err := o.publishedRepo.GetVersion(searchReq.PackageId, searchReq.Version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": searchReq.Version, "packageId": searchReq.PackageId}, + } + } + operationEnt, err := o.operationRepository.GetOperationById(searchReq.PackageId, versionEnt.Version, versionEnt.Revision, searchReq.ApiType, searchReq.OperationId) + if err != nil { + return nil, err + } + if operationEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationNotFound, + Message: exception.OperationNotFoundMsg, + Params: map[string]interface{}{"operationId": searchReq.OperationId, "version": searchReq.Version, "packageId": searchReq.PackageId}, + } + } + operationView := 
entity.MakeSingleOperationView(*operationEnt) + + return &operationView, nil +} + +func (o operationServiceImpl) GetOperationDeprecatedItems(searchReq view.OperationBasicSearchReq) (*view.DeprecatedItems, error) { + versionEnt, err := o.publishedRepo.GetVersion(searchReq.PackageId, searchReq.Version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": searchReq.Version, "packageId": searchReq.PackageId}, + } + } + operationEnt, err := o.operationRepository.GetOperationDeprecatedItems(searchReq.PackageId, versionEnt.Version, versionEnt.Revision, searchReq.ApiType, searchReq.OperationId) + if err != nil { + return nil, err + } + if operationEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationNotFound, + Message: exception.OperationNotFoundMsg, + Params: map[string]interface{}{"operationId": searchReq.OperationId, "version": searchReq.Version, "packageId": searchReq.PackageId}, + } + } + if operationEnt.DeprecatedItems == nil { + return &view.DeprecatedItems{DeprecatedItems: make([]view.DeprecatedItem, 0)}, nil + } + operationView := entity.MakeSingleOperationDeprecatedItemsView(*operationEnt) + + return &operationView, nil +} + +func (o operationServiceImpl) GetOperationsTags(searchReq view.OperationBasicSearchReq, skipRefs bool) (*view.OperationTags, error) { + versionEnt, err := o.publishedRepo.GetVersion(searchReq.PackageId, searchReq.Version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": searchReq.Version, "packageId": searchReq.PackageId}, + 
// GetOperationChanges returns the changelog entries of a single operation
// between two package versions. When previousPackageId/previousVersion are
// empty, the version's recorded previous version (falling back to the same
// package) is used. Requires a precomputed version comparison; 404 otherwise.
func (o operationServiceImpl) GetOperationChanges(packageId string, version string, operationId string, previousPackageId string, previousVersion string, severities []string) (*view.OperationChangesView, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}

	// Default the comparison baseline from the version's own metadata.
	if previousVersion == "" || previousPackageId == "" {
		if versionEnt.PreviousVersion == "" {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.NoPreviousVersion,
				Message: exception.NoPreviousVersionMsg,
				Params:  map[string]interface{}{"version": version},
			}
		}
		previousVersion = versionEnt.PreviousVersion
		if versionEnt.PreviousVersionPackageId != "" {
			previousPackageId = versionEnt.PreviousVersionPackageId
		} else {
			previousPackageId = packageId
		}
	}
	previousVersionEnt, err := o.publishedRepo.GetVersion(previousPackageId, previousVersion)
	if err != nil {
		return nil, err
	}
	if previousVersionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": previousVersion, "packageId": previousPackageId},
		}
	}

	comparisonId := view.MakeVersionComparisonId(
		versionEnt.PackageId, versionEnt.Version, versionEnt.Revision,
		previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision,
	)
	versionComparison, err := o.publishedRepo.GetVersionComparison(comparisonId)
	if err != nil {
		return nil, err
	}
	if versionComparison == nil || versionComparison.NoContent {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ComparisonNotFound,
			Message: exception.ComparisonNotFoundMsg,
			Params: map[string]interface{}{
				"comparisonId":      comparisonId,
				"packageId":         versionEnt.PackageId,
				"version":           versionEnt.Version,
				"revision":          versionEnt.Revision,
				"previousPackageId": previousVersionEnt.PackageId,
				"previousVersion":   previousVersionEnt.Version,
				"previousRevision":  previousVersionEnt.Revision,
			},
		}
	}

	changes := make([]interface{}, 0)
	changedOperationEnt, err := o.operationRepository.GetOperationChanges(comparisonId, operationId, severities)
	if err != nil {
		return nil, err
	}
	if changedOperationEnt != nil {
		changesView := entity.MakeOperationChangesListView(*changedOperationEnt)
		for _, changeView := range changesView {
			if len(severities) == 0 {
				changes = append(changes, changeView)
			} else {
				// NOTE(review): severities were already passed to the repository
				// query above; this in-memory re-check looks redundant — confirm
				// whether the repository filter is exhaustive before removing it.
				if utils.SliceContains(severities, view.GetSingleOperationChangeCommon(changeView).Severity) {
					changes = append(changes, changeView)

				}
			}
		}
	}
	return &view.OperationChangesView{Changes: changes}, nil
}

// GetVersionChanges_deprecated is the legacy changelog endpoint. It mirrors
// GetVersionChanges but uses the deprecated repository query and comparison
// view, and does not support the ApiAudience filter.
func (o operationServiceImpl) GetVersionChanges_deprecated(packageId string, version string, apiType string, searchReq view.VersionChangesReq) (*view.VersionChangesView, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}

	// Default the comparison baseline from the version's own metadata.
	if searchReq.PreviousVersion == "" || searchReq.PreviousVersionPackageId == "" {
		if versionEnt.PreviousVersion == "" {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.NoPreviousVersion,
				Message: exception.NoPreviousVersionMsg,
				Params:  map[string]interface{}{"version": version},
			}
		}
		searchReq.PreviousVersion = versionEnt.PreviousVersion
		if versionEnt.PreviousVersionPackageId != "" {
			searchReq.PreviousVersionPackageId = versionEnt.PreviousVersionPackageId
		} else {
			searchReq.PreviousVersionPackageId = packageId
		}
	}
	previousVersionEnt, err := o.publishedRepo.GetVersion(searchReq.PreviousVersionPackageId, searchReq.PreviousVersion)
	if err != nil {
		return nil, err
	}
	if previousVersionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": searchReq.PreviousVersion, "packageId": searchReq.PreviousVersionPackageId},
		}
	}

	comparisonId := view.MakeVersionComparisonId(
		versionEnt.PackageId, versionEnt.Version, versionEnt.Revision,
		previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision,
	)

	versionComparison, err := o.publishedRepo.GetVersionComparison(comparisonId)
	if err != nil {
		return nil, err
	}
	if versionComparison == nil || versionComparison.NoContent {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ComparisonNotFound,
			Message: exception.ComparisonNotFoundMsg,
			Params: map[string]interface{}{
				"comparisonId":      comparisonId,
				"packageId":         versionEnt.PackageId,
				"version":           versionEnt.Version,
				"revision":          versionEnt.Revision,
				"previousPackageId": previousVersionEnt.PackageId,
				"previousVersion":   previousVersionEnt.Version,
				"previousRevision":  previousVersionEnt.Revision,
			},
		}
	}
	searchQuery := entity.ChangelogSearchQueryEntity{
		ComparisonId:   comparisonId,
		ApiType:        apiType,
		ApiKind:        searchReq.ApiKind,
		TextFilter:     searchReq.TextFilter,
		DocumentSlug:   searchReq.DocumentSlug,
		Tags:           searchReq.Tags,
		EmptyTag:       searchReq.EmptyTag,
		RefPackageId:   searchReq.RefPackageId,
		Limit:          searchReq.Limit,
		Offset:         searchReq.Offset,
		EmptyGroup:     searchReq.EmptyGroup,
		Group:          searchReq.Group,
		GroupPackageId: versionEnt.PackageId,
		GroupVersion:   versionEnt.Version,
		GroupRevision:  versionEnt.Revision,
		Severities:     searchReq.Severities,
	}
	operationComparisons := make([]interface{}, 0)
	changelogOperationEnts, err := o.operationRepository.GetChangelog_deprecated(searchQuery)
	if err != nil {
		return nil, err
	}

	// Collect both sides of each comparison for package/version enrichment.
	packageVersions := make(map[string][]string, 0)
	for _, changelogOperationEnt := range changelogOperationEnts {
		operationComparisons = append(operationComparisons, entity.MakeOperationComparisonChangelogView_deprecated(changelogOperationEnt))
		if packageRefKey := view.MakePackageRefKey(changelogOperationEnt.PackageId, changelogOperationEnt.Version, changelogOperationEnt.Revision); packageRefKey != "" {
			packageVersions[changelogOperationEnt.PackageId] = append(packageVersions[changelogOperationEnt.PackageId], view.MakeVersionRefKey(changelogOperationEnt.Version, changelogOperationEnt.Revision))
		}
		if previousPackageRefKey := view.MakePackageRefKey(changelogOperationEnt.PreviousPackageId, changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision); previousPackageRefKey != "" {
			packageVersions[changelogOperationEnt.PreviousPackageId] = append(packageVersions[changelogOperationEnt.PreviousPackageId], view.MakeVersionRefKey(changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision))
		}
	}
	packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
	if err != nil {
		return nil, err
	}
	changelog := &view.VersionChangesView{
		PreviousVersion:          view.MakeVersionRefKey(previousVersionEnt.Version, previousVersionEnt.Revision),
		PreviousVersionPackageId: previousVersionEnt.PackageId,
		Operations:               operationComparisons,
		Packages:                 packagesRefs,
	}
	return changelog, nil
}
// GetVersionChanges returns the operation-level changelog between a package
// version and its (explicit or recorded) previous version. This is the current
// variant: unlike GetVersionChanges_deprecated it forwards the ApiAudience
// filter and uses the non-deprecated repository query and comparison views.
// Requires a precomputed version comparison; 404 otherwise.
func (o operationServiceImpl) GetVersionChanges(packageId string, version string, apiType string, searchReq view.VersionChangesReq) (*view.VersionChangesView, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}

	// Default the comparison baseline from the version's own metadata.
	if searchReq.PreviousVersion == "" || searchReq.PreviousVersionPackageId == "" {
		if versionEnt.PreviousVersion == "" {
			return nil, &exception.CustomError{
				Status:  http.StatusNotFound,
				Code:    exception.NoPreviousVersion,
				Message: exception.NoPreviousVersionMsg,
				Params:  map[string]interface{}{"version": version},
			}
		}
		searchReq.PreviousVersion = versionEnt.PreviousVersion
		if versionEnt.PreviousVersionPackageId != "" {
			searchReq.PreviousVersionPackageId = versionEnt.PreviousVersionPackageId
		} else {
			searchReq.PreviousVersionPackageId = packageId
		}
	}
	previousVersionEnt, err := o.publishedRepo.GetVersion(searchReq.PreviousVersionPackageId, searchReq.PreviousVersion)
	if err != nil {
		return nil, err
	}
	if previousVersionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": searchReq.PreviousVersion, "packageId": searchReq.PreviousVersionPackageId},
		}
	}

	comparisonId := view.MakeVersionComparisonId(
		versionEnt.PackageId, versionEnt.Version, versionEnt.Revision,
		previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision,
	)

	versionComparison, err := o.publishedRepo.GetVersionComparison(comparisonId)
	if err != nil {
		return nil, err
	}
	if versionComparison == nil || versionComparison.NoContent {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.ComparisonNotFound,
			Message: exception.ComparisonNotFoundMsg,
			Params: map[string]interface{}{
				"comparisonId":      comparisonId,
				"packageId":         versionEnt.PackageId,
				"version":           versionEnt.Version,
				"revision":          versionEnt.Revision,
				"previousPackageId": previousVersionEnt.PackageId,
				"previousVersion":   previousVersionEnt.Version,
				"previousRevision":  previousVersionEnt.Revision,
			},
		}
	}
	searchQuery := entity.ChangelogSearchQueryEntity{
		ComparisonId:   comparisonId,
		ApiType:        apiType,
		ApiKind:        searchReq.ApiKind,
		TextFilter:     searchReq.TextFilter,
		DocumentSlug:   searchReq.DocumentSlug,
		Tags:           searchReq.Tags,
		EmptyTag:       searchReq.EmptyTag,
		RefPackageId:   searchReq.RefPackageId,
		Limit:          searchReq.Limit,
		Offset:         searchReq.Offset,
		EmptyGroup:     searchReq.EmptyGroup,
		Group:          searchReq.Group,
		GroupPackageId: versionEnt.PackageId,
		GroupVersion:   versionEnt.Version,
		GroupRevision:  versionEnt.Revision,
		Severities:     searchReq.Severities,
		ApiAudience:    searchReq.ApiAudience,
	}
	operationComparisons := make([]interface{}, 0)
	changelogOperationEnts, err := o.operationRepository.GetChangelog(searchQuery)
	if err != nil {
		return nil, err
	}

	// Collect both sides of each comparison for package/version enrichment.
	packageVersions := make(map[string][]string, 0)
	for _, changelogOperationEnt := range changelogOperationEnts {
		operationComparisons = append(operationComparisons, entity.MakeOperationComparisonChangelogView(changelogOperationEnt))
		if packageRefKey := view.MakePackageRefKey(changelogOperationEnt.PackageId, changelogOperationEnt.Version, changelogOperationEnt.Revision); packageRefKey != "" {
			packageVersions[changelogOperationEnt.PackageId] = append(packageVersions[changelogOperationEnt.PackageId], view.MakeVersionRefKey(changelogOperationEnt.Version, changelogOperationEnt.Revision))
		}
		if previousPackageRefKey := view.MakePackageRefKey(changelogOperationEnt.PreviousPackageId, changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision); previousPackageRefKey != "" {
			packageVersions[changelogOperationEnt.PreviousPackageId] = append(packageVersions[changelogOperationEnt.PreviousPackageId], view.MakeVersionRefKey(changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision))
		}
	}
	packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions)
	if err != nil {
		return nil, err
	}
	changelog := &view.VersionChangesView{
		PreviousVersion:          view.MakeVersionRefKey(previousVersionEnt.Version, previousVersionEnt.Revision),
		PreviousVersionPackageId: previousVersionEnt.PackageId,
		Operations:               operationComparisons,
		Packages:                 packagesRefs,
	}
	return changelog, nil
}
view.MakePackageRefKey(changelogOperationEnt.PackageId, changelogOperationEnt.Version, changelogOperationEnt.Revision); packageRefKey != "" { + packageVersions[changelogOperationEnt.PackageId] = append(packageVersions[changelogOperationEnt.PackageId], view.MakeVersionRefKey(changelogOperationEnt.Version, changelogOperationEnt.Revision)) + } + if previousPackageRefKey := view.MakePackageRefKey(changelogOperationEnt.PreviousPackageId, changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision); previousPackageRefKey != "" { + packageVersions[changelogOperationEnt.PreviousPackageId] = append(packageVersions[changelogOperationEnt.PreviousPackageId], view.MakeVersionRefKey(changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision)) + } + } + packagesRefs, err := o.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + if err != nil { + return nil, err + } + changelog := &view.VersionChangesView{ + PreviousVersion: view.MakeVersionRefKey(previousVersionEnt.Version, previousVersionEnt.Revision), + PreviousVersionPackageId: previousVersionEnt.PackageId, + Operations: operationComparisons, + Packages: packagesRefs, + } + return changelog, nil +} + +// deprecated +func (o operationServiceImpl) SearchForOperations_deprecated(searchReq view.SearchQueryReq) (*view.SearchResult_deprecated, error) { + searchQuery, err := entity.MakeOperationSearchQueryEntity(&searchReq) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidSearchParameters, + Message: exception.InvalidSearchParametersMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + err = setOperationSearchParams(searchReq.OperationSearchParams, searchQuery) + if err != nil { + return nil, err + } + //todo maybe move to envs + searchQuery.OperationSearchWeight = entity.OperationSearchWeight{ + ScopeWeight: 13, + TitleWeight: 3, + OpenCountWeight: 0.2, + } + 
searchQuery.VersionStatusSearchWeight = entity.VersionStatusSearchWeight{ + VersionReleaseStatus: string(view.Release), + VersionReleaseStatusWeight: 4, + VersionDraftStatus: string(view.Draft), + VersionDraftStatusWeight: 0.6, + VersionArchivedStatus: string(view.Archived), + VersionArchivedStatusWeight: 0.1, + } + operationEntities, err := o.operationRepository.SearchForOperations_deprecated(searchQuery) + if err != nil { + return nil, err + } + operations := make([]view.OperationSearchResult_deprecated, 0) + for _, ent := range operationEntities { + operations = append(operations, entity.MakeOperationSearchResultView_deprecated(ent)) + } + + return &view.SearchResult_deprecated{Operations: &operations}, nil +} + +func (o operationServiceImpl) SearchForOperations(searchReq view.SearchQueryReq) (*view.SearchResult, error) { + searchQuery, err := entity.MakeOperationSearchQueryEntity(&searchReq) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidSearchParameters, + Message: exception.InvalidSearchParametersMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + err = setOperationSearchParams(searchReq.OperationSearchParams, searchQuery) + if err != nil { + return nil, err + } + //todo maybe move to envs + searchQuery.OperationSearchWeight = entity.OperationSearchWeight{ + ScopeWeight: 13, + TitleWeight: 3, + OpenCountWeight: 0.2, + } + searchQuery.VersionStatusSearchWeight = entity.VersionStatusSearchWeight{ + VersionReleaseStatus: string(view.Release), + VersionReleaseStatusWeight: 4, + VersionDraftStatus: string(view.Draft), + VersionDraftStatusWeight: 0.6, + VersionArchivedStatus: string(view.Archived), + VersionArchivedStatusWeight: 0.1, + } + operationEntities, err := o.operationRepository.SearchForOperations(searchQuery) + if err != nil { + return nil, err + } + operations := make([]interface{}, 0) + for _, ent := range operationEntities { + operations = append(operations, 
// setOperationSearchParams translates the optional API-type-specific search
// parameters into repository filter flags. A nil params block means
// "search everything" (FilterAll). Unknown API types are rejected with 400.
func setOperationSearchParams(operationParams *view.OperationSearchParams, searchQuery *entity.OperationSearchQuery) error {
	if operationParams == nil {
		searchQuery.FilterAll = true
		return nil
	}
	switch operationParams.ApiType {
	case string(view.RestApiType):
		return setRestOperationSearchParams(operationParams, searchQuery)
	case string(view.GraphqlApiType):
		return setGraphqlOperationSearchParams(operationParams, searchQuery)
	default:
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidSearchParameters,
			Message: exception.InvalidSearchParametersMsg,
			Params:  map[string]interface{}{"error": fmt.Sprintf("%v apiType is not supported", operationParams.ApiType)},
		}
	}
}

// setRestOperationSearchParams maps REST scope and detailed-scope selections
// onto the query's filter flags. Selecting every member of a scope group is
// collapsed back to "no filter" for that group, and if nothing remains
// selected the query falls back to FilterAll. Invalid scopes yield 400.
func setRestOperationSearchParams(restOperationParams *view.OperationSearchParams, searchQuery *entity.OperationSearchQuery) error {
	searchQuery.ApiType = restOperationParams.ApiType
	searchQuery.Methods = append(searchQuery.Methods, restOperationParams.Methods...)
	if len(restOperationParams.Scopes)+len(restOperationParams.DetailedScopes) == 0 {
		searchQuery.FilterAll = true
	} else {
		for _, s := range restOperationParams.Scopes {
			switch s {
			case view.RestScopeRequest:
				searchQuery.FilterRequest = true
			case view.RestScopeResponse:
				searchQuery.FilterResponse = true
			default:
				return &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidSearchParameters,
					Message: exception.InvalidSearchParametersMsg,
					Params:  map[string]interface{}{"error": fmt.Sprintf("scope %v is invalid for %v apiType", s, restOperationParams.ApiType)},
				}
			}
		}
		// Both request and response selected == no narrowing at all.
		if searchQuery.FilterRequest && searchQuery.FilterResponse {
			searchQuery.FilterRequest = false
			searchQuery.FilterResponse = false
		}
		for _, s := range restOperationParams.DetailedScopes {
			switch s {
			case view.RestScopeAnnotation:
				searchQuery.FilterAnnotation = true
			case view.RestScopeExamples:
				searchQuery.FilterExamples = true
			case view.RestScopeProperties:
				searchQuery.FilterProperties = true
			default:
				return &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidSearchParameters,
					Message: exception.InvalidSearchParametersMsg,
					Params:  map[string]interface{}{"error": fmt.Sprintf("detailed scope %v is invalid", s)},
				}
			}
		}
		// All three detailed scopes selected == no narrowing at all.
		if searchQuery.FilterAnnotation && searchQuery.FilterExamples && searchQuery.FilterProperties {
			searchQuery.FilterAnnotation = false
			searchQuery.FilterExamples = false
			searchQuery.FilterProperties = false
		}
		// Nothing left selected after collapsing: search everything.
		if !searchQuery.FilterRequest && !searchQuery.FilterResponse &&
			!searchQuery.FilterAnnotation && !searchQuery.FilterExamples && !searchQuery.FilterProperties {
			searchQuery.FilterAll = true
		}
	}
	return nil
}
// setGraphqlOperationSearchParams maps GraphQL operation-type and scope
// selections onto the query's filter flags. Operation types are validated
// first; an empty scope list means FilterAll. Invalid values yield 400.
func setGraphqlOperationSearchParams(graphqlOperationParams *view.OperationSearchParams, searchQuery *entity.OperationSearchQuery) error {
	searchQuery.ApiType = graphqlOperationParams.ApiType
	for _, operationType := range graphqlOperationParams.OperationTypes {
		if !view.ValidGraphQLOperationType(operationType) {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.InvalidSearchParameters,
				Message: exception.InvalidSearchParametersMsg,
				Params:  map[string]interface{}{"error": fmt.Sprintf("operation type %v is invalid for %v apiType", operationType, graphqlOperationParams.ApiType)},
			}
		}
	}
	searchQuery.OperationTypes = append(searchQuery.OperationTypes, graphqlOperationParams.OperationTypes...)
	if len(graphqlOperationParams.Scopes) == 0 {
		searchQuery.FilterAll = true
	} else {
		for _, s := range graphqlOperationParams.Scopes {
			switch s {
			case view.GraphqlScopeAnnotation:
				searchQuery.FilterAnnotation = true
			case view.GraphqlScopeArgument:
				searchQuery.FilterArgument = true
			case view.GraphqlScopeProperty:
				searchQuery.FilterProperty = true
			default:
				return &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.InvalidSearchParameters,
					Message: exception.InvalidSearchParametersMsg,
					Params:  map[string]interface{}{"error": fmt.Sprintf("scope %v is invalid for %v apiType", s, graphqlOperationParams.ApiType)},
				}
			}
		}
	}
	return nil
}

// GetOperationModelUsages finds all operations of the same package version
// that reference the given model of the given operation. The model name is
// resolved to its content hash on the source operation, then matched across
// operations via that hash. 404 when the version, operation, or model is
// missing.
func (o operationServiceImpl) GetOperationModelUsages(packageId string, version string, apiType string, operationId string, modelName string) (*view.OperationModelUsages, error) {
	versionEnt, err := o.publishedRepo.GetVersion(packageId, version)
	if err != nil {
		return nil, err
	}
	if versionEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishedPackageVersionNotFound,
			Message: exception.PublishedPackageVersionNotFoundMsg,
			Params:  map[string]interface{}{"version": version, "packageId": packageId},
		}
	}
	operationEnt, err := o.operationRepository.GetOperationById(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, operationId)
	if err != nil {
		return nil, err
	}
	if operationEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationNotFound,
			Message: exception.OperationNotFoundMsg,
			Params:  map[string]interface{}{"operationId": operationId, "version": version, "packageId": packageId},
		}
	}
	modelHash, modelExists := operationEnt.Models[modelName]
	if !modelExists {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.OperationModelNotFound,
			Message: exception.OperationModelNotFoundMsg,
			Params:  map[string]interface{}{"operationId": operationId, "modelName": modelName},
		}
	}
	operationsWithModel, err := o.operationRepository.GetOperationsByModelHash(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, apiType, modelHash)
	if err != nil {
		return nil, err
	}
	modelUsages := make([]view.OperationModels, 0)
	for _, operation := range operationsWithModel {
		modelUsages = append(modelUsages, view.OperationModels{
			OperationId: operation.OperationId,
			ModelNames:  operation.Models,
		})
	}
	return &view.OperationModelUsages{ModelUsages: modelUsages}, nil
}
diff --git a/qubership-apihub-service/service/PackageService.go b/qubership-apihub-service/service/PackageService.go
new file mode 100644
index 0000000..b47bca6
--- /dev/null
+++ b/qubership-apihub-service/service/PackageService.go
@@ -0,0 +1,857 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package service + +import ( + "fmt" + "net/http" + "regexp" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +type PackageService interface { + CreatePackage(ctx context.SecurityContext, packg view.SimplePackage) (*view.SimplePackage, error) + GetPackage(ctx context.SecurityContext, id string, withParents bool) (*view.SimplePackage, error) + GetPackagesList(ctx context.SecurityContext, req view.PackageListReq) (*view.Packages, error) + UpdatePackage(ctx context.SecurityContext, packg *view.PatchPackageReq, packageId string) (*view.SimplePackage, error) + DeletePackage(ctx context.SecurityContext, id string) error + FavorPackage(ctx context.SecurityContext, id string) error + DisfavorPackage(ctx context.SecurityContext, id string) error + GetPackageStatus(id string) (*view.Status, error) + GetPackageName(id string) (string, error) + PackageExists(packageId string) (bool, error) + GetAvailableVersionPublishStatuses(ctx context.SecurityContext, packageId string) (*view.Statuses, error) + RecalculateOperationGroups(ctx context.SecurityContext, packageId string) error + CalculateOperationGroups(packageId string, groupingPrefix string) (*view.CalculatedOperationGroups, error) +} + +func NewPackageService(gitClientProvider GitClientProvider, + repo repository.PrjGrpIntRepository, + favoritesRepo repository.FavoritesRepository, + publishedRepo repository.PublishedRepository, + versionService VersionService, + roleService RoleService, + atService 
ActivityTrackingService, + operationGroupService OperationGroupService, + userRepo repository.UserRepository, + ptHandler PackageTransitionHandler, + systemInfoService SystemInfoService) PackageService { + return &packageServiceImpl{ + gitClientProvider: gitClientProvider, + pRepo: repo, + favoritesRepo: favoritesRepo, + publishedRepo: publishedRepo, + versionService: versionService, + roleService: roleService, + atService: atService, + operationGroupService: operationGroupService, + userRepo: userRepo, + ptHandler: ptHandler, + systemInfoService: systemInfoService, + } +} + +type packageServiceImpl struct { + gitClientProvider GitClientProvider + pRepo repository.PrjGrpIntRepository + favoritesRepo repository.FavoritesRepository + publishedRepo repository.PublishedRepository + versionService VersionService + roleService RoleService + atService ActivityTrackingService + operationGroupService OperationGroupService + userRepo repository.UserRepository + ptHandler PackageTransitionHandler + systemInfoService SystemInfoService +} + +func (p packageServiceImpl) CreatePackage(ctx context.SecurityContext, packg view.SimplePackage) (*view.SimplePackage, error) { + if !validPackageKind(packg.Kind) { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.IncorrectPackageKind, + Message: exception.IncorrectPackageKindMsg, + Params: map[string]interface{}{"kind": packg.Kind}, + } + } + if packg.Kind == entity.KIND_WORKSPACE { + packg.ParentId = "" + } + if packg.ParentId != "" { + existingEnt, err := p.publishedRepo.GetPackage(packg.ParentId) + if err != nil { + return nil, err + } + if existingEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"parentId": packg.ParentId}, + } + } + packg.Id = packg.ParentId + "." 
+ packg.Alias + + if packg.ExcludeFromSearch == nil { + packg.ExcludeFromSearch = &existingEnt.ExcludeFromSearch + } else { + if existingEnt.ExcludeFromSearch && !*packg.ExcludeFromSearch { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnableToChangeExcludeFromSearch, + Message: exception.UnableToChangeExcludeFromSearchMsg, + } + } + } + } else { + if packg.Kind == entity.KIND_GROUP || packg.Kind == entity.KIND_PACKAGE || packg.Kind == entity.KIND_DASHBOARD { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageParentIsMissing, + Message: exception.PackageParentIsMissingMsg, + } + } + packg.Id = packg.Alias + if packg.ExcludeFromSearch == nil { + excludeFromSearchDefaultValue := false + packg.ExcludeFromSearch = &excludeFromSearchDefaultValue + } + } + existingPackage, err := p.publishedRepo.GetPackageIncludingDeleted(packg.Id) + if err != nil { + return nil, err + } + if existingPackage != nil { + return nil, &exception.CustomError{ + Status: http.StatusConflict, + Code: exception.PackageAlreadyExists, + Message: exception.PackageAlreadyExistsMsg, + Params: map[string]interface{}{"id": packg.Id}, + } + } + packageIdReserved, err := p.userRepo.PrivatePackageIdExists(packg.Id) + if err != nil { + return nil, err + } + if packageIdReserved { + return nil, &exception.CustomError{ + Status: http.StatusConflict, + Code: exception.PackageAlreadyExists, + Message: exception.PackageAlreadyExistsMsg, + Params: map[string]interface{}{"id": packg.Id}, + } + } + transitionId, err := p.ptHandler.HandleMissingPackageId(packg.Id) + if err != nil { + return nil, fmt.Errorf("failed to check if package id %s transition exists during creation: %w", packg.Id, err) + } + if transitionId != "" { + return nil, &exception.CustomError{ + Status: http.StatusConflict, + Code: exception.PackageRedirectExists, + Message: exception.PackageRedirectExistsMsg, + Params: map[string]interface{}{"id": packg.Id}, + } 
+ } + + if packg.Kind == entity.KIND_GROUP || packg.Kind == entity.KIND_WORKSPACE { + packg.ServiceName = "" + } + if packg.ServiceName != "" { + err := p.checkServiceNameAvailability(packg.Id, packg.ServiceName) + if err != nil { + return nil, err + } + } + if packg.RestGroupingPrefix != "" { + err := validatePackageGroupingPrefix(packg.RestGroupingPrefix) + if err != nil { + return nil, err + } + } + + packg.CreatedAt = time.Now() + packg.CreatedBy = ctx.GetUserId() + if packg.DefaultRole == "" { + packg.DefaultRole = view.ViewerRoleId + } + if packg.ParentId != "" { + err = p.roleService.ValidateDefaultRole(ctx, packg.ParentId, packg.DefaultRole) + if err != nil { + return nil, err + } + } else { + roleExists, err := p.roleService.PackageRoleExists(packg.DefaultRole) + if err != nil { + return nil, err + } + if !roleExists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotFound, + Message: exception.RoleNotFoundMsg, + Params: map[string]interface{}{"role": packg.DefaultRole}, + } + } + } + if packg.ReleaseVersionPattern != "" { + _, err := regexp.Compile(packg.ReleaseVersionPattern) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidReleaseVersionPatternFormat, + Message: exception.InvalidReleaseVersionPatternFormatMsg, + Params: map[string]interface{}{"pattern": packg.ReleaseVersionPattern}, + Debug: err.Error(), + } + } + } else { + packg.ReleaseVersionPattern = p.systemInfoService.GetReleaseVersionPattern() + } + err = p.publishedRepo.CreatePackage(entity.MakePackageEntity(&packg)) + if err != nil { + return nil, err + } + + p.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETCreatePackage, + Data: nil, + PackageId: packg.Id, + Date: time.Now(), + UserId: packg.CreatedBy, + }) + + parents, err := p.getParents(packg.Id) + if err != nil { + return nil, err + } + packg.Parents = parents + + isFavorite, err := 
p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), packg.Id) + if err != nil { + return nil, err + } + packg.IsFavorite = isFavorite + userPermissions, err := p.roleService.GetPermissionsForPackage(ctx, packg.Id) + if err != nil { + return nil, err + } + packg.UserPermissions = userPermissions + return &packg, err +} + +func (p packageServiceImpl) checkServiceNameAvailability(packageId string, serviceName string) error { + serviceOwnerPackageId, err := p.publishedRepo.GetServiceOwner(utils.GetPackageWorkspaceId(packageId), serviceName) + if err != nil { + return err + } + if serviceOwnerPackageId != "" && serviceOwnerPackageId != packageId { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ServiceNameAlreadyTaken, + Message: exception.ServiceNameAlreadyTakenMsg, + Params: map[string]interface{}{"serviceName": serviceName, "packageId": serviceOwnerPackageId}, + } + } + return nil +} +func (p packageServiceImpl) PackageExists(packageId string) (bool, error) { + ent, err := p.publishedRepo.GetPackage(packageId) + if err != nil { + return false, err + } + if ent == nil { + return false, nil + } else { + return true, nil + } +} + +func (p packageServiceImpl) GetPackage(ctx context.SecurityContext, id string, withParents bool) (*view.SimplePackage, error) { + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return nil, err + } + if ent == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": id}, + } + } + var parentPackages []view.ParentPackageInfo + if withParents { + parents, err := p.publishedRepo.GetParentsForPackage(id) + if err != nil { + return nil, err + } + + for _, grp := range parents { + if grp.Id != id { + userPermissions, err := p.roleService.GetPermissionsForPackage(ctx, grp.Id) + if err != nil { + return nil, err + } + parentPackage := 
*entity.MakePackageParentView(&grp) + hasReadPermission := utils.SliceContains(userPermissions, string(view.ReadPermission)) + parentPackage.HasReadPermission = &hasReadPermission + parentPackages = append(parentPackages, parentPackage) + } + } + } else { + parentPackages = nil + } + + isFavorite, err := p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), id) + if err != nil { + return nil, err + } + + userPermissions, err := p.roleService.GetPermissionsForPackage(ctx, id) + if err != nil { + return nil, err + } + packageView := entity.MakeSimplePackageView(ent, parentPackages, isFavorite, userPermissions) + if packageView.DefaultReleaseVersion != "" { + latestRevision, err := p.publishedRepo.GetLatestRevision(ent.Id, packageView.DefaultReleaseVersion) + if err != nil { + return nil, err + } + packageView.DefaultVersion = view.MakeVersionRefKey(packageView.DefaultReleaseVersion, latestRevision) + } else { + packageView.DefaultVersion, err = p.versionService.GetDefaultVersion(packageView.Id) + if err != nil { + return nil, err + } + } + + return packageView, nil +} + +func (p packageServiceImpl) GetPackagesList(ctx context.SecurityContext, searchReq view.PackageListReq) (*view.Packages, error) { + var err error + result := make([]view.PackagesInfo, 0) + var entities []entity.PackageEntity + skipped := 0 + if len(searchReq.Kind) == 0 { + searchReq.Kind = []string{entity.KIND_WORKSPACE} + } + entities, err = p.publishedRepo.GetFilteredPackagesWithOffset(searchReq, ctx.GetUserId()) + if err != nil { + return nil, err + } + if err != nil { + log.Error("Failed to get packages: ", err.Error()) + return nil, err + } + for _, ent := range entities { + var parents []view.ParentPackageInfo = nil + if searchReq.ShowParents { + parents, err = p.getParents(ent.Id) + if err != nil { + return nil, err + } + } + + var isFavorite = true + if !searchReq.OnlyFavorite { + isFavorite, err = p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), ent.Id) + if err != nil { + return nil, err + } 
+ } + + permissions, err := p.roleService.GetPermissionsForPackage(ctx, ent.Id) + if err != nil { + return nil, err + } + //do not show private packages + //todo move this restriction to db query + if !utils.SliceContains(permissions, string(view.ReadPermission)) { + skipped++ + continue + } + + var lastReleaseVersionDetails *view.VersionDetails + if searchReq.LastReleaseVersionDetails { + defaultReleaseVersion := ent.DefaultReleaseVersion + if defaultReleaseVersion == "" { + defaultReleaseVersion, err = p.versionService.GetDefaultVersion(ent.Id) + if err != nil { + return nil, err + } + } + if defaultReleaseVersion != "" { + lastReleaseVersionDetails, err = p.versionService.GetVersionDetails(ent.Id, defaultReleaseVersion) + if err != nil { + return nil, err + } + } + } + + packagesInfo := entity.MakePackagesInfo(&ent, lastReleaseVersionDetails, parents, isFavorite, permissions) + result = append(result, *packagesInfo) + } + if skipped != 0 { + searchReq.Offset = searchReq.Offset + searchReq.Limit + searchReq.Limit = skipped + extraPackages, err := p.GetPackagesList(ctx, searchReq) + if err != nil { + return nil, err + } + result = append(result, extraPackages.Packages...) 
+ } + + return &view.Packages{Packages: result}, nil +} + +func (p packageServiceImpl) UpdatePackage(ctx context.SecurityContext, packg *view.PatchPackageReq, packageId string) (*view.SimplePackage, error) { + existingEnt, err := p.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if existingEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + + if existingEnt.DefaultRole == view.NoneRoleId && existingEnt.ParentId == "" { + if !p.roleService.IsSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + + if packg.DefaultRole != nil && existingEnt.DefaultRole != *packg.DefaultRole { + err = p.roleService.ValidateDefaultRole(ctx, existingEnt.Id, *packg.DefaultRole) + if err != nil { + return nil, err + } + } + + if packg.DefaultReleaseVersion != nil && *packg.DefaultReleaseVersion != "" { + versionName, revision, err := SplitVersionRevision(*packg.DefaultReleaseVersion) + if err != nil { + return nil, err + } + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName, "packageId": packageId}, + } + } + if revision != 0 && revision != version.Revision { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.DefaultReleaseVersionHasNotLatestRevision, + Message: exception.DefaultReleaseVersionHasNotLatestRevisionMsg, + } + } + if version.Status != 
string(view.Release) { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.DefaultReleaseVersionIsNotReleased, + Message: exception.DefaultReleaseVersionIsNotReleasedMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + *packg.DefaultReleaseVersion = versionName + } + if packg.ServiceName != nil { + if existingEnt.ServiceName != "" && existingEnt.ServiceName != *packg.ServiceName { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ServiceNameCantBeModified, + Message: exception.ServiceNameCantBeModifiedMsg, + } + } + if *packg.ServiceName != "" { + err := p.checkServiceNameAvailability(existingEnt.Id, *packg.ServiceName) + if err != nil { + return nil, err + } + } + } + if packg.ReleaseVersionPattern != nil && *packg.ReleaseVersionPattern != "" { + _, err := regexp.Compile(*packg.ReleaseVersionPattern) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidReleaseVersionPatternFormat, + Message: exception.InvalidReleaseVersionPatternFormatMsg, + Params: map[string]interface{}{"pattern": *packg.ReleaseVersionPattern}, + Debug: err.Error(), + } + } + } + if packg.ExcludeFromSearch != nil { + if *packg.ExcludeFromSearch != existingEnt.ExcludeFromSearch { + if existingEnt.ParentId != "" { + parentEnt, err := p.publishedRepo.GetPackage(existingEnt.ParentId) + if err != nil { + return nil, err + } + if parentEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": existingEnt.ParentId}, + } + } + if parentEnt.ExcludeFromSearch && !*packg.ExcludeFromSearch { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnableToChangeExcludeFromSearch, + Message: exception.UnableToChangeExcludeFromSearchMsg, + } + } + } + } + } + if 
packg.RestGroupingPrefix != nil && existingEnt.RestGroupingPrefix != *packg.RestGroupingPrefix { + err := validatePackageGroupingPrefix(*packg.RestGroupingPrefix) + if err != nil { + return nil, err + } + } + + ent := entity.MakeSimplePackageUpdateEntity(existingEnt, packg) + + res, err := p.publishedRepo.UpdatePackage(ent) + if err != nil { + return nil, err + } + + dataMap := map[string]interface{}{} + + meta := make([]string, 0) + if packg.Name != nil { + meta = append(meta, "name") + } + if packg.Description != nil { + meta = append(meta, "description") + } + if packg.ServiceName != nil { + meta = append(meta, "serviceName") + } + if packg.ImageUrl != nil { + meta = append(meta, "imageUrl") + } + if packg.DefaultRole != nil { + meta = append(meta, "defaultRole") + } + if packg.DefaultReleaseVersion != nil { + meta = append(meta, "defaultReleaseVersion") + } + if packg.ReleaseVersionPattern != nil { + meta = append(meta, "releaseVersionPattern") + } + if packg.RestGroupingPrefix != nil { + meta = append(meta, "restGroupingPrefix") + } + dataMap["packageMeta"] = meta + + p.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETPatchPackageMeta, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + + parents, err := p.getParents(res.Id) + if err != nil { + return nil, err + } + + isFavorite, err := p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), res.Id) + if err != nil { + return nil, err + } + userRole, err := p.roleService.GetPermissionsForPackage(ctx, res.Id) + if err != nil { + return nil, err + } + return entity.MakeSimplePackageView(res, parents, isFavorite, userRole), err +} + +func (p packageServiceImpl) DeletePackage(ctx context.SecurityContext, id string) error { + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return err + } + if ent == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + 
Params: map[string]interface{}{"packageId": id}, + } + } + if ent.DefaultRole == view.NoneRoleId && ent.ParentId == "" { + if !p.roleService.IsSysadm(ctx) { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + err = p.publishedRepo.DeletePackage(id, ctx.GetUserId()) + if err != nil { + return err + } + + p.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETDeletePackage, + Data: nil, + PackageId: id, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + + return nil +} + +func (p packageServiceImpl) FavorPackage(ctx context.SecurityContext, id string) error { + userId := ctx.GetUserId() + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return err + } + if ent == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": id}, + } + } + favorite, err := p.favoritesRepo.IsFavoritePackage(userId, id) + if err != nil { + return err + } + if favorite { + return nil + } + err = p.favoritesRepo.AddPackageToFavorites(userId, id) + if err != nil { + return err + } + return nil +} + +func (p packageServiceImpl) DisfavorPackage(ctx context.SecurityContext, id string) error { + userId := ctx.GetUserId() + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return err + } + if ent == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": id}, + } + } + favorite, err := p.favoritesRepo.IsFavoritePackage(userId, id) + if err != nil { + return err + } + if !favorite { + return nil + } + err = p.favoritesRepo.RemovePackageFromFavorites(userId, id) + if err != nil { + return err + } + return nil +} + +func (p 
packageServiceImpl) GetPackageName(id string) (string, error) { + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return "", err + } + if ent != nil { + return ent.Name, nil + } + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"id": id}, + } + +} + +func (p packageServiceImpl) GetPackageStatus(id string) (*view.Status, error) { + ent, err := p.publishedRepo.GetPackage(id) + if err != nil { + return nil, err + } + if ent != nil { + return &view.Status{Status: "exists"}, nil + } + deletedEnt, err := p.publishedRepo.GetDeletedPackage(id) + if err != nil { + return nil, err + } + if deletedEnt != nil { + return &view.Status{Status: "deleted"}, nil + } + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"id": id}, + } +} + +func (p packageServiceImpl) getParents(packageId string) ([]view.ParentPackageInfo, error) { + parents, err := p.publishedRepo.GetParentsForPackage(packageId) + if err != nil { + return nil, err + } + var result []view.ParentPackageInfo + for _, grp := range parents { + result = append(result, *entity.MakePackageParentView(&grp)) + } + return result, err +} + +func validPackageKind(kind string) bool { + if kind != entity.KIND_GROUP && kind != entity.KIND_PACKAGE && kind != entity.KIND_WORKSPACE && kind != entity.KIND_DASHBOARD { + return false + } + return true +} + +func validatePackageGroupingPrefix(groupingPrefix string) error { + if groupingPrefix == "" { + return nil + } + //todo do we need this validation? 
+ if !strings.HasSuffix(groupingPrefix, `/`) { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.InvalidGroupingPrefix, + Message: exception.InvalidGroupingPrefixMsg, + Params: map[string]interface{}{"error": "groupingPrefix must end with / "}, + } + } + + if strings.Count(groupingPrefix, view.PackageGroupingPrefixWildcard) != 1 { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.InvalidGroupingPrefix, + Message: exception.InvalidGroupingPrefixMsg, + Params: map[string]interface{}{"error": fmt.Sprintf("groupingPrefix must contain exactly one %v", view.PackageGroupingPrefixWildcard)}, + } + } + return nil +} + +func (p packageServiceImpl) GetAvailableVersionPublishStatuses(ctx context.SecurityContext, packageId string) (*view.Statuses, error) { + statusesForPublish, err := p.roleService.GetAvailableVersionPublishStatuses(ctx, packageId) + if err != nil { + return nil, err + } + return &view.Statuses{Statuses: statusesForPublish}, err +} + +func (p packageServiceImpl) RecalculateOperationGroups(ctx context.SecurityContext, packageId string) error { + packageEnt, err := p.publishedRepo.GetPackage(packageId) + if err != nil { + return err + } + if packageEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + err = p.publishedRepo.RecalculatePackageOperationGroups(packageEnt.Id, view.MakePackageGroupingPrefixRegex(packageEnt.RestGroupingPrefix), "", ctx.GetUserId()) + if err != nil { + return err + } + return nil +} + +func (p packageServiceImpl) CalculateOperationGroups(packageId string, groupingPrefix string) (*view.CalculatedOperationGroups, error) { + packageEnt, err := p.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + 
Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + defaultVersion, err := p.versionService.GetDefaultVersion(packageId) + if err != nil { + return nil, err + } + if defaultVersion == "" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.DefaultVersionNotFound, + Message: exception.DefaultVersionNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + err = validatePackageGroupingPrefix(groupingPrefix) + if err != nil { + return nil, err + } + groupingPrefix = view.MakePackageGroupingPrefixRegex(groupingPrefix) + groups, err := p.operationGroupService.CalculateOperationGroups(packageId, defaultVersion, groupingPrefix) + if err != nil { + return nil, err + } + return &view.CalculatedOperationGroups{Groups: groups}, nil +} diff --git a/qubership-apihub-service/service/PackageTransitionHandler.go b/qubership-apihub-service/service/PackageTransitionHandler.go new file mode 100644 index 0000000..de3b8ee --- /dev/null +++ b/qubership-apihub-service/service/PackageTransitionHandler.go @@ -0,0 +1,41 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + log "github.com/sirupsen/logrus" +) + +type PackageTransitionHandler interface { + HandleMissingPackageId(id string) (string, error) +} + +func NewPackageTransitionHandler(repo repository.TransitionRepository) PackageTransitionHandler { + return &packageTransitionHandlerImpl{repo: repo} +} + +type packageTransitionHandlerImpl struct { + repo repository.TransitionRepository +} + +func (p packageTransitionHandlerImpl) HandleMissingPackageId(id string) (string, error) { + newId, err := p.repo.GetNewPackageId(id) + if err != nil { + return "", err + } + log.Debugf("Transition handler: new package id %s found for %s", newId, id) + return newId, nil +} diff --git a/qubership-apihub-service/service/PackageVersionEnrichmentService.go b/qubership-apihub-service/service/PackageVersionEnrichmentService.go new file mode 100644 index 0000000..39db733 --- /dev/null +++ b/qubership-apihub-service/service/PackageVersionEnrichmentService.go @@ -0,0 +1,53 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PackageVersionEnrichmentService interface { + GetPackageVersionRefsMap(packageRefs map[string][]string) (map[string]view.PackageVersionRef, error) +} + +func NewPackageVersionEnrichmentService(publishedRepo repository.PublishedRepository) PackageVersionEnrichmentService { + return packageVersionEnrichmentServiceImpl{publishedRepo: publishedRepo} +} + +type packageVersionEnrichmentServiceImpl struct { + publishedRepo repository.PublishedRepository +} + +func (p packageVersionEnrichmentServiceImpl) GetPackageVersionRefsMap(packageRefs map[string][]string) (map[string]view.PackageVersionRef, error) { + packageVersionRefs := make(map[string]view.PackageVersionRef) + for packageId, versions := range packageRefs { + uniqueVersions := utils.UniqueSet(versions) + for _, version := range uniqueVersions { + richPackageVersion, err := p.publishedRepo.GetRichPackageVersion(packageId, version) + if err != nil { + return nil, err + } + if richPackageVersion != nil { + packageAndVersionData := entity.MakePackageVersionRef(richPackageVersion) + refId := view.MakePackageRefKey(richPackageVersion.PackageId, richPackageVersion.Version, richPackageVersion.Revision) + packageVersionRefs[refId] = packageAndVersionData + } + } + } + return packageVersionRefs, nil +} diff --git a/qubership-apihub-service/service/PortalService.go b/qubership-apihub-service/service/PortalService.go new file mode 100644 index 0000000..d767050 --- /dev/null +++ b/qubership-apihub-service/service/PortalService.go @@ -0,0 +1,584 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 
(the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "archive/zip" + "bytes" + "encoding/json" + "fmt" + "html" + "io/ioutil" + "net/http" + "sort" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PortalService interface { + GenerateInteractivePageForPublishedFile(packageId string, versionName string, fileId string) ([]byte, string, error) + GenerateInteractivePageForPublishedVersion(packageId string, versionName string) ([]byte, string, error) + GenerateInteractivePageForTransformedDocuments(packageId, version string, transformedDocuments entity.TransformedContentDataEntity) ([]byte, error) +} + +func NewPortalService(basePath string, publishedService PublishedService, publishedRepository repository.PublishedRepository, prjGrpIntRepo repository.PrjGrpIntRepository) PortalService { + return &portalServiceImpl{basePath: basePath, publishedService: publishedService, publishedRepository: publishedRepository, prjGrpIntRepo: prjGrpIntRepo} +} + +type portalServiceImpl struct { + basePath string + publishedService PublishedService + publishedRepository repository.PublishedRepository + prjGrpIntRepo repository.PrjGrpIntRepository +} + +const singlePageAssetPath = 
"/static/templates/single_page.html" + +const scriptAssetPath = "/static/templates/scripts/apispec-view.js" +const indexAssetPath = "/static/templates/index.html" +const pageAssetPath = "/static/templates/page.html" +const lsAssetPath = "/static/templates/ls.html" +const logoAssetPath = "/static/templates/resources/corporatelogo.png" +const stylesAssetPath = "/static/templates/resources/styles.css" +const mdLibPath = "/static/templates/scripts/markdown-it.min.js" + +func (p portalServiceImpl) GenerateInteractivePageForPublishedFile(packageId string, versionName string, slug string) ([]byte, string, error) { + packageEnt, err := p.publishedRepository.GetPackage(packageId) + if err != nil { + return nil, "", err + } + + file, data, err := p.publishedService.GetLatestContentDataBySlug(packageId, versionName, slug) + if err != nil { + return nil, "", err + } + if !isProcessable(file) { + return nil, "", &exception.CustomError{ + Status: http.StatusGone, + Code: exception.UnableToGenerateInteractiveDoc, + Message: exception.UnableToGenerateInteractiveDocMsg, + Params: map[string]interface{}{"$file": file.Slug}, + } + } + + if !isProcessable(file) { + return nil, "", fmt.Errorf("file type is not supperted for export") + } + + zipBuf := bytes.Buffer{} + + zw := zip.NewWriter(&zipBuf) + + scriptAssetFile, err := ioutil.ReadFile(p.basePath + scriptAssetPath) + if err != nil { + return nil, "", err + } + + singlePageAssetFile, err := ioutil.ReadFile(p.basePath + singlePageAssetPath) + if err != nil { + return nil, "", err + } + + lsAssetFile, err := ioutil.ReadFile(p.basePath + lsAssetPath) + if err != nil { + return nil, "", err + } + + logoAssetFile, err := ioutil.ReadFile(p.basePath + logoAssetPath) + if err != nil { + return nil, "", err + } + stylesAssetFile, err := ioutil.ReadFile(p.basePath + stylesAssetPath) + if err != nil { + return nil, "", err + } + + result := generateLs(string(lsAssetFile), packageEnt.Name, file.Version) + err = addFileToZip(zw, "ls.html", 
[]byte(result)) + if err != nil { + return nil, "", err + } + + // add static resources + err = addFileToZip(zw, "resources/corporatelogo.png", logoAssetFile) + if err != nil { + return nil, "", err + } + err = addFileToZip(zw, "resources/styles.css", stylesAssetFile) + if err != nil { + return nil, "", err + } + + spec := string(data.Data) + spec = html.EscapeString(spec) + + result = generateSinglePage(string(singlePageAssetFile), packageEnt.Name, file.Version, file.Title, string(scriptAssetFile), spec) + + err = addFileToZip(zw, "index.html", []byte(result)) + if err != nil { + return nil, "", err + } + + err = zw.Close() + if err != nil { + return nil, "", err + } + + return zipBuf.Bytes(), slug + ".zip", nil +} + +func (p portalServiceImpl) GenerateInteractivePageForPublishedVersion(packageId string, versionName string) ([]byte, string, error) { + packageEnt, err := p.publishedRepository.GetPackage(packageId) + if err != nil { + return nil, "", err + } + if packageEnt == nil { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.Kind == entity.KIND_GROUP { + return nil, "", &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GroupDocGenerationUnsupported, + Message: exception.GroupDocGenerationUnsupportedMsg, + } + } + + repoUrl := "" + projectEnt, err := p.prjGrpIntRepo.GetById(packageId) + if err != nil { + return nil, "", err + } + if projectEnt != nil { + repoUrl = projectEnt.RepositoryUrl + } + + version, err := p.publishedRepository.GetVersion(packageId, versionName) + if err != nil { + return nil, "", err + } + if version == nil { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } 
+ } + + versionFiles, err := p.publishedRepository.GetRevisionContent(packageId, version.Version, version.Revision) + if err != nil { + return nil, "", err + } + + scriptAssetFile, err := ioutil.ReadFile(p.basePath + scriptAssetPath) + if err != nil { + return nil, "", err + } + scriptAsset := string(scriptAssetFile) + + indexAssetFile, err := ioutil.ReadFile(p.basePath + indexAssetPath) + if err != nil { + return nil, "", err + } + indexAsset := string(indexAssetFile) + + pageAssetFile, err := ioutil.ReadFile(p.basePath + pageAssetPath) + if err != nil { + return nil, "", err + } + pageAsset := string(pageAssetFile) + + lsAssetFile, err := ioutil.ReadFile(p.basePath + lsAssetPath) + if err != nil { + return nil, "", err + } + + logoAssetFile, err := ioutil.ReadFile(p.basePath + logoAssetPath) + if err != nil { + return nil, "", err + } + + stylesAssetFile, err := ioutil.ReadFile(p.basePath + stylesAssetPath) + if err != nil { + return nil, "", err + } + + mdLibFile, err := ioutil.ReadFile(p.basePath + mdLibPath) + if err != nil { + return nil, "", err + } + + zipBuf := bytes.Buffer{} + + zw := zip.NewWriter(&zipBuf) + + sort.Slice(versionFiles, func(i, j int) bool { + return versionFiles[i].Title < versionFiles[j].Title + }) + + var readmeHtml string + var mdLibHtml string + var fileList []view.FileMetadata + generatedHtmls := map[string]bool{} + for _, file := range versionFiles { + cd, err := p.publishedRepository.GetContentData(packageId, file.Checksum) + if err != nil { + return nil, "", err + } + + if !isProcessable(entity.MakePublishedContentView(&file)) { + // include file to result plain list as is + err = addFileToZip(zw, generateFlatName(file.Name, file.Slug), cd.Data) //TODO: need to use title here? + if err != nil { + return nil, "", err + } + if strings.HasSuffix(strings.ToLower(file.FileId), "readme.md") { // include readme as special chapter + readmeHtml = fmt.Sprintf("
\n
\n
\n
\n ", + html.EscapeString(string(cd.Data))) + mdLibHtml = fmt.Sprintf("", string(mdLibFile)) + } + } else { + spec := string(cd.Data) + spec = html.EscapeString(spec) + + result := generatePage(pageAsset, packageEnt.Name, version.Version, file.Title, scriptAsset, spec) + + err = addFileToZip(zw, file.Slug+".html", []byte(result)) //TODO: need to use title here? + if err != nil { + return nil, "", err + } + + generatedHtmls[file.Slug] = true + } + + fileList = append(fileList, view.FileMetadata{ + Type: file.DataType, + Name: file.Title, + Format: file.Format, + Slug: file.Slug, + Labels: file.Metadata.GetLabels(), + }) + } + + result := generateIndex(indexAsset, packageEnt.Name, version.Version, mdLibHtml, readmeHtml, fileList, generatedHtmls) + err = addFileToZip(zw, "index.html", []byte(result)) + + if err != nil { + return nil, "", err + } + + result = generateLs(string(lsAssetFile), packageEnt.Name, version.Version) + err = addFileToZip(zw, "ls.html", []byte(result)) + if err != nil { + return nil, "", err + } + + // add static resources + err = addFileToZip(zw, "resources/corporatelogo.png", logoAssetFile) + + if err != nil { + return nil, "", err + } + err = addFileToZip(zw, "resources/styles.css", stylesAssetFile) + if err != nil { + return nil, "", err + } + + mdBytes, err := generateMetadata(repoUrl, version, fileList) + if err != nil { + return nil, "", err + } + err = addFileToZip(zw, "metadata.json", mdBytes) + if err != nil { + return nil, "", err + } + + err = zw.Close() + if err != nil { + return nil, "", err + } + + attachmentVersionName, err := p.getVersionNameForAttachmentName(packageId, versionName) + if err != nil { + return nil, "", err + } + filename := packageEnt.Name + "_" + attachmentVersionName + ".zip" + + return zipBuf.Bytes(), filename, nil +} + +func (p portalServiceImpl) GenerateInteractivePageForTransformedDocuments(packageId, version string, transformedDocuments entity.TransformedContentDataEntity) ([]byte, error) { + packageEnt, 
err := p.publishedRepository.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.Kind == entity.KIND_GROUP { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.GroupDocGenerationUnsupported, + Message: exception.GroupDocGenerationUnsupportedMsg, + } + } + zipReader, err := zip.NewReader(bytes.NewReader(transformedDocuments.Data), int64(len(transformedDocuments.Data))) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackageArchive, + Message: exception.InvalidPackageArchiveMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + scriptAssetFile, err := ioutil.ReadFile(p.basePath + scriptAssetPath) + if err != nil { + return nil, err + } + scriptAsset := string(scriptAssetFile) + + indexAssetFile, err := ioutil.ReadFile(p.basePath + indexAssetPath) + if err != nil { + return nil, err + } + indexAsset := string(indexAssetFile) + + pageAssetFile, err := ioutil.ReadFile(p.basePath + pageAssetPath) + if err != nil { + return nil, err + } + pageAsset := string(pageAssetFile) + + lsAssetFile, err := ioutil.ReadFile(p.basePath + lsAssetPath) + if err != nil { + return nil, err + } + + logoAssetFile, err := ioutil.ReadFile(p.basePath + logoAssetPath) + if err != nil { + return nil, err + } + + stylesAssetFile, err := ioutil.ReadFile(p.basePath + stylesAssetPath) + if err != nil { + return nil, err + } + + mdLibFile, err := ioutil.ReadFile(p.basePath + mdLibPath) + if err != nil { + return nil, err + } + + zipBuf := bytes.Buffer{} + + zw := zip.NewWriter(&zipBuf) + + var readmeHtml string + var mdLibHtml string + var fileList []view.FileMetadata + generatedHtmls := map[string]bool{} + for _, file := 
range zipReader.File { + zipFile, err := readZipFile(file) + if err != nil { + return nil, err + } + documentInfo := getDocumentByName(file.Name, transformedDocuments.DocumentsInfo) + if documentInfo == nil { + return nil, nil // todo return error + } + if !isProcessableFileName(file.Name) { + // include file to result plain list as is + err = addFileToZip(zw, generateFlatName(file.Name, documentInfo.Slug), zipFile) //TODO: need to use title here? + if err != nil { + return nil, err + } + if strings.HasSuffix(strings.ToLower(documentInfo.FileId), "readme.md") { // include readme as special chapter + readmeHtml = fmt.Sprintf("
\n
\n
\n
\n ", + html.EscapeString(string(zipFile))) + mdLibHtml = fmt.Sprintf("", string(mdLibFile)) + } + } else { + spec := string(zipFile) + spec = html.EscapeString(spec) + + result := generatePage(pageAsset, packageEnt.Name, version, documentInfo.Title, scriptAsset, spec) + + err = addFileToZip(zw, documentInfo.Slug+".html", []byte(result)) //TODO: need to use title here? + if err != nil { + return nil, err + } + + generatedHtmls[documentInfo.Slug] = true + } + fileList = append(fileList, view.FileMetadata{ + //Type: documentInfo.DataType, //todo + Name: documentInfo.Title, + Format: documentInfo.Format, + Slug: documentInfo.Slug, + //Labels: documentInfo.Metadata.GetLabels(), //todo + }) + } + + result := generateIndex(indexAsset, packageEnt.Name, version, mdLibHtml, readmeHtml, fileList, generatedHtmls) + err = addFileToZip(zw, "index.html", []byte(result)) + + if err != nil { + return nil, err + } + + result = generateLs(string(lsAssetFile), packageEnt.Name, version) + err = addFileToZip(zw, "ls.html", []byte(result)) + if err != nil { + return nil, err + } + + // add static resources + err = addFileToZip(zw, "resources/corporatelogo.png", logoAssetFile) + + if err != nil { + return nil, err + } + err = addFileToZip(zw, "resources/styles.css", stylesAssetFile) + if err != nil { + return nil, err + } + + err = zw.Close() + if err != nil { + return nil, err + } + + return zipBuf.Bytes(), nil +} + +func getDocumentByName(name string, documents []view.PackageDocument) *view.PackageDocument { + for _, document := range documents { + if document.Filename == name { + return &document + } + } + return nil +} + +func isProcessable(file *view.PublishedContent) bool { + return isProcessableFileName(file.Name) +} +func isProcessableFileName(fileName string) bool { + if !(strings.HasSuffix(fileName, ".json") || strings.HasSuffix(fileName, ".yaml") || strings.HasSuffix(fileName, ".yml")) { + return false + } + // TODO: check content type or not? 
+ return true +} + +func generateFlatName(name string, slug string) string { + parts := strings.Split(name, ".") + if len(parts) > 0 { + ext := parts[len(parts)-1] + return strings.TrimSuffix(slug, "-"+ext) + "." + ext + } else { + return slug + } +} + +func generateSinglePage(template string, projectName string, version string, fileTitle string, script string, spec string) string { + return fmt.Sprintf(template, fileTitle, script, projectName, version, spec, makeProjectTitle(projectName, version)) +} + +func generatePage(template string, projectName string, version string, fileTitle string, script string, spec string) string { + return fmt.Sprintf(template, fileTitle, script, projectName, version, fileTitle, spec, makeProjectTitle(projectName, version)) +} + +func generateIndex(template string, projectName string, version string, mdJs string, readmeHtml string, fileList []view.FileMetadata, generatedHtmls map[string]bool) string { + htmlList := "" + for _, file := range fileList { + exists, _ := generatedHtmls[file.Slug] + if exists { + htmlList += fmt.Sprintf("
  • %s
  • \n", file.Slug+".html", file.Name) + } + } + + return fmt.Sprintf(template, projectName, mdJs, projectName, version, readmeHtml, htmlList, makeProjectTitle(projectName, version)) +} + +func generateLs(template string, projectName string, version string) string { + return fmt.Sprintf(template, projectName, version, makeProjectTitle(projectName, version)) +} + +func generateMetadata(repositoryUrl string, version *entity.PublishedVersionEntity, fileList []view.FileMetadata) ([]byte, error) { + metadataObj := view.VersionDocMetadata{ + GitLink: repositoryUrl, + Branch: version.Metadata.GetBranchName(), + DateOfPublication: version.PublishedAt.Format(time.RFC3339), + CommitId: version.Metadata.GetCommitId(), + Version: version.Version, + Revision: version.Revision, + User: "", // TODO add user to version + Labels: version.Metadata.GetLabels(), + Files: fileList, + } + + mdBytes, err := json.MarshalIndent(metadataObj, "", " ") + if err != nil { + return nil, err + } + return mdBytes, err +} + +func addFileToZip(zw *zip.Writer, name string, content []byte) error { + mdFw, err := zw.Create(name) + if err != nil { + return err + } + _, err = mdFw.Write(content) + if err != nil { + return err + } + return nil +} + +func makeProjectTitle(projectName string, version string) string { + return projectName + " " + version +} + +func (p portalServiceImpl) getVersionNameForAttachmentName(packageId, version string) (string, error) { + latestRevision, err := p.publishedRepository.GetLatestRevision(packageId, version) + if err != nil { + return "", err + } + versionName, versionRevision, err := SplitVersionRevision(version) + if err != nil { + return "", err + } + if latestRevision == versionRevision { + return versionName, nil + } + return version, nil +} diff --git a/qubership-apihub-service/service/PrivateUserPackageService.go b/qubership-apihub-service/service/PrivateUserPackageService.go new file mode 100644 index 0000000..701d4fc --- /dev/null +++ 
b/qubership-apihub-service/service/PrivateUserPackageService.go @@ -0,0 +1,213 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "net/http" + "strconv" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/gosimple/slug" +) + +type PrivateUserPackageService interface { + GenerateUserPrivatePackageId(userId string) (string, error) + CreatePrivateUserPackage(ctx context.SecurityContext, userId string) (*view.SimplePackage, error) + GetPrivateUserPackage(userId string) (*view.SimplePackage, error) + PrivatePackageIdIsTaken(packageId string) (bool, error) +} + +func NewPrivateUserPackageService( + publishedRepo repository.PublishedRepository, + userRepo repository.UserRepository, + roleRepository repository.RoleRepository, + favoritesRepo repository.FavoritesRepository, +) PrivateUserPackageService { + return &privateUserPackageServiceImpl{ + publishedRepo: publishedRepo, + userRepo: userRepo, + roleRepository: roleRepository, + favoritesRepo: favoritesRepo, + } +} + +type 
privateUserPackageServiceImpl struct { + publishedRepo repository.PublishedRepository + userRepo repository.UserRepository + roleRepository repository.RoleRepository + favoritesRepo repository.FavoritesRepository +} + +func (p privateUserPackageServiceImpl) GenerateUserPrivatePackageId(userId string) (string, error) { + userIdSlug := slug.Make(userId) + privatePackageId := userIdSlug + privatePackageIdTaken, err := p.userRepo.PrivatePackageIdExists(privatePackageId) + if err != nil { + return "", err + } + i := 1 + for privatePackageIdTaken { + privatePackageId = userIdSlug + "-" + strconv.Itoa(i) + privatePackageIdTaken, err = p.userRepo.PrivatePackageIdExists(privatePackageId) + if err != nil { + return "", err + } + i++ + } + packageEnt, err := p.publishedRepo.GetPackageIncludingDeleted(privatePackageId) + if err != nil { + return "", err + } + for packageEnt != nil { + i++ + privatePackageId = userIdSlug + "-" + strconv.Itoa(i) + packageEnt, err = p.publishedRepo.GetPackageIncludingDeleted(privatePackageId) + if err != nil { + return "", err + } + } + return privatePackageId, nil +} + +func (p privateUserPackageServiceImpl) CreatePrivateUserPackage(ctx context.SecurityContext, userId string) (*view.SimplePackage, error) { + userEnt, err := p.userRepo.GetUserById(userId) + if err != nil { + return nil, err + } + if userEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + packageEnt, err := p.publishedRepo.GetPackageIncludingDeleted(userEnt.PrivatePackageId) + if err != nil { + return nil, err + } + if packageEnt != nil { + if packageEnt.DeletedAt != nil { + // restore workspace package + packageEnt.DeletedAt = nil + packageEnt.DeletedBy = "" + resEnt, err := p.publishedRepo.UpdatePackage(packageEnt) + if err != nil { + return nil, err + } + userPermissions, err := 
p.roleRepository.GetUserPermissions(packageEnt.Id, userId) + if err != nil { + return nil, err + } + return entity.MakeSimplePackageView(resEnt, nil, false, userPermissions), nil + } else { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.SinglePrivatePackageAllowed, + Message: exception.SinglePrivatePackageAllowedMsg, + } + } + } + newPrivatePackageEnt := &entity.PackageEntity{ + Id: userEnt.PrivatePackageId, + Kind: entity.KIND_WORKSPACE, + Name: fmt.Sprintf(`%v's private workspace`, userEnt.Username), + ParentId: "", + Alias: userEnt.PrivatePackageId, + DefaultRole: view.NoneRoleId, + ExcludeFromSearch: true, + CreatedAt: time.Now(), + CreatedBy: ctx.GetUserId(), + } + userRoleIds := []string{view.AdminRoleId} + userPackageMemberEnt := &entity.PackageMemberRoleEntity{ + PackageId: userEnt.PrivatePackageId, + UserId: userEnt.Id, + Roles: userRoleIds, + CreatedAt: time.Now(), + CreatedBy: ctx.GetUserId(), + } + err = p.publishedRepo.CreatePrivatePackageForUser(newPrivatePackageEnt, userPackageMemberEnt) + if err != nil { + return nil, err + } + + userPermissions, err := p.roleRepository.GetUserPermissions(newPrivatePackageEnt.Id, userId) + if err != nil { + return nil, err + } + + return entity.MakeSimplePackageView(newPrivatePackageEnt, nil, false, userPermissions), nil +} + +func (p privateUserPackageServiceImpl) GetPrivateUserPackage(userId string) (*view.SimplePackage, error) { + userEnt, err := p.userRepo.GetUserById(userId) + if err != nil { + return nil, err + } + if userEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + packageEnt, err := p.publishedRepo.GetPackage(userEnt.PrivatePackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: 
exception.PrivateWorkspaceIdDoesntExist, + Message: exception.PrivateWorkspaceIdDoesntExistMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + userPermissions, err := p.roleRepository.GetUserPermissions(packageEnt.Id, userId) + if err != nil { + return nil, err + } + isFavorite, err := p.favoritesRepo.IsFavoritePackage(userId, packageEnt.Id) + if err != nil { + return nil, err + } + return entity.MakeSimplePackageView(packageEnt, nil, isFavorite, userPermissions), nil +} + +func (p privateUserPackageServiceImpl) PrivatePackageIdIsTaken(packageId string) (bool, error) { + privatePackageIdReserved, err := p.userRepo.PrivatePackageIdExists(packageId) + if err != nil { + return false, err + } + if privatePackageIdReserved { + return true, nil + } + packageEnt, err := p.publishedRepo.GetPackageIncludingDeleted(packageId) + if err != nil { + return false, err + } + if packageEnt != nil { + return true, nil + } + return false, nil +} diff --git a/qubership-apihub-service/service/ProjectService.go b/qubership-apihub-service/service/ProjectService.go new file mode 100644 index 0000000..4d1f003 --- /dev/null +++ b/qubership-apihub-service/service/ProjectService.go @@ -0,0 +1,408 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + goctx "context" + "fmt" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type ProjectService interface { + AddProject(ctx context.SecurityContext, project *view.Project, groupAlias string) (*view.Project, error) + GetProject(ctx context.SecurityContext, id string) (*view.Project, error) + GetProjectsForGroup(ctx context.SecurityContext, groupId string) ([]view.Project, error) + GetFilteredProjects(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool) ([]view.Project, error) + UpdateProject(ctx context.SecurityContext, project *view.Project) (*view.Project, error) + DeleteProject(ctx context.SecurityContext, id string) error + FavorProject(ctx context.SecurityContext, id string) error + DisfavorProject(ctx context.SecurityContext, id string) error +} + +func NewProjectService(gitClientProvider GitClientProvider, + repo repository.PrjGrpIntRepository, + favoritesRepo repository.FavoritesRepository, + publishedRepo repository.PublishedRepository) ProjectService { + return &projectServiceImpl{ + gitClientProvider: gitClientProvider, + pRepo: repo, + favoritesRepo: favoritesRepo, + publishedRepo: publishedRepo, + } +} + +type projectServiceImpl struct { + gitClientProvider GitClientProvider + pRepo repository.PrjGrpIntRepository + favoritesRepo repository.FavoritesRepository + publishedRepo repository.PublishedRepository +} + +func (p projectServiceImpl) AddProject(ctx context.SecurityContext, project *view.Project, groupAlias string) (*view.Project, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), 
ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("AddProject(%+v,%s)", project, groupAlias)) + + project.Id = groupAlias + "." + project.Alias + ent, err := p.pRepo.GetById(project.Id) + if err != nil { + return nil, err + } + + if ent == nil { + ent, err = p.pRepo.GetDeletedEntity(project.Id) + if err != nil { + return nil, err + } + } + if ent != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ProjectAliasAlreadyExists, + Message: exception.ProjectAliasAlreadyExistsMsg, + Params: map[string]interface{}{"alias": project.Alias}, + } + } + gitClient, err := p.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + repoName, repoUrl, err := gitClient.GetRepoNameAndUrl(goCtx, project.Integration.RepositoryId) + if err != nil { + return nil, err + } + project.Integration.RepositoryName = repoName + project.Integration.RepositoryUrl = repoUrl + resultProjectEntity, err := p.pRepo.Create(entity.MakePrjIntEntity(project)) + if err != nil { + return nil, err + } + + groups, err := p.getParentGroups(resultProjectEntity.GroupId) + if err != nil { + return nil, err + } + + projectView := entity.MakeProjectView(resultProjectEntity, false, groups) + + return projectView, nil +} + +func (p projectServiceImpl) GetProject(ctx context.SecurityContext, id string) (*view.Project, error) { + exists, err := p.pRepo.Exists(id) + if err != nil { + return nil, err + } + if !exists { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: map[string]interface{}{"projectId": id}, + } + } + projectEntity, err := p.pRepo.GetById(id) + if err != nil { + return nil, err + } + + versionEntity, err := p.publishedRepo.GetLastVersion(projectEntity.PackageId) + if err != nil { + return 
nil, err + } + if versionEntity != nil { + projectEntity.LastVersion = versionEntity.Version + } + + groups, err := p.getParentGroups(id) + if err != nil { + return nil, err + } + isFavorite, err := p.favoritesRepo.IsFavoriteProject(ctx.GetUserId(), id) + if err != nil { + return nil, err + } + projectView := entity.MakeProjectView(projectEntity, isFavorite, groups) + return projectView, nil +} + +func (p projectServiceImpl) GetProjectsForGroup(ctx context.SecurityContext, groupId string) ([]view.Project, error) { + result := make([]view.Project, 0) + entities, err := p.pRepo.GetProjectsForGroup(groupId) + if err != nil { + return nil, err + } + for _, ent := range entities { + groups, err := p.getParentGroups(ent.GroupId) + if err != nil { + return nil, err + } + isFavorite, err := p.favoritesRepo.IsFavoriteProject(ctx.GetUserId(), ent.Id) + if err != nil { + return nil, err + } + versionEntity, err := p.publishedRepo.GetLastVersion(ent.PackageId) + if err != nil { + return nil, err + } + if versionEntity != nil { + ent.LastVersion = versionEntity.Version + } + result = append(result, *entity.MakeProjectView(&ent, isFavorite, groups)) + } + return result, nil +} + +func (p projectServiceImpl) GetFilteredProjects(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool) ([]view.Project, error) { + result := make([]view.Project, 0) + entities, err := p.pRepo.GetFilteredProjects(filter, groupId) + if err != nil { + return nil, err + } + for _, ent := range entities { + groups, err := p.getParentGroups(ent.Id) + if err != nil { + return nil, err + } + isFavorite, err := p.favoritesRepo.IsFavoriteProject(ctx.GetUserId(), ent.Id) + if err != nil { + return nil, err + } + versionEntity, err := p.publishedRepo.GetLastVersion(ent.PackageId) + if err != nil { + return nil, err + } + if versionEntity != nil { + ent.LastVersion = versionEntity.Version + } + + projectView := *entity.MakeProjectView(&ent, isFavorite, groups) + + if !onlyFavorite || 
(onlyFavorite && isFavorite) { // TODO: need to handle via repository + result = append(result, projectView) + } + } + //todo paging + return result, nil +} + +func (p projectServiceImpl) UpdateProject(ctx context.SecurityContext, project *view.Project) (*view.Project, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("UpdateProject(%+v)", project)) + + existingPrj, err := p.pRepo.GetById(project.Id) + if err != nil { + return nil, err + } + if existingPrj == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: map[string]interface{}{"projectId": project.Id}, + } + } + + if existingPrj.GroupId != project.GroupId { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ParentGroupIdCantBeModified, + Message: exception.ParentGroupIdCantBeModifiedMsg, + } + } + + if existingPrj.Alias != project.Alias { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AliasCantBeModified, + Message: exception.AliasCantBeModifiedMsg, + } + } + + newPrj := entity.MakePrjIntUpdateEntity(project, existingPrj) + + if project.PackageId != existingPrj.PackageId { + if project.PackageId != "" { + projectByPackageId, err := p.pRepo.GetByPackageId(project.PackageId) + if err != nil { + return nil, err + } + if projectByPackageId != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageAlreadyTaken, + Message: exception.PackageAlreadyTakenMsg, + Params: map[string]interface{}{"packageId": project.PackageId, "projectId": projectByPackageId.Id}, + } + } + packageById, err := p.publishedRepo.GetPackage(project.PackageId) + if err != nil { + return nil, err + } + if packageById == nil { + return nil, &exception.CustomError{ + Status: 
http.StatusNotFound, + Code: exception.PackageDoesntExists, + Message: exception.PackageDoesntExistsMsg, + Params: map[string]interface{}{"packageId": project.PackageId}, + } + } + if packageById.Kind != entity.KIND_PACKAGE { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageKindIsNotAllowed, + Message: exception.PackageKindIsNotAllowedMsg, + Params: map[string]interface{}{"packageId": project.PackageId, "kind": packageById.Kind}, + } + } + } + } + + if existingPrj.RepositoryId != newPrj.RepositoryId { + intType, err := view.GitIntegrationTypeFromStr(newPrj.IntegrationType) + if err != nil { + return nil, err + } + + gitClient, err := p.gitClientProvider.GetUserClient(intType, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + repoName, repoUrl, err := gitClient.GetRepoNameAndUrl(goCtx, newPrj.RepositoryId) + if err != nil { + return nil, err + } + + newPrj.RepositoryName = repoName + newPrj.RepositoryUrl = repoUrl + } + + res, err := p.pRepo.Update(newPrj) + if err != nil { + return nil, err + } + + isFavorite, err := p.favoritesRepo.IsFavoriteProject(ctx.GetUserId(), project.Id) + if err != nil { + return nil, err + } + + existingGroups, err := p.getParentGroups(project.Id) + if err != nil { + return nil, err + } + + versionEntity, err := p.publishedRepo.GetLastVersion(res.PackageId) + if err != nil { + return nil, err + } + if versionEntity != nil { + res.LastVersion = versionEntity.Version + } + + projectView := entity.MakeProjectView(res, isFavorite, existingGroups) + + return projectView, nil +} + +func (p projectServiceImpl) DeleteProject(ctx context.SecurityContext, id string) error { + exists, err := p.pRepo.Exists(id) + if err != nil { + return err + } + if !exists { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: map[string]interface{}{"projectId": id}, + } 
+ } + return p.pRepo.Delete(id, ctx.GetUserId()) +} + +func (p projectServiceImpl) FavorProject(ctx context.SecurityContext, id string) error { + userId := ctx.GetUserId() + + favorite, err := p.favoritesRepo.IsFavoriteProject(userId, id) + if err != nil { + return err + } + + if favorite { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AlreadyFavored, + Message: exception.AlreadyFavoredMsg, + Params: map[string]interface{}{"id": id, "user": userId}, + } + } + err = p.favoritesRepo.AddProjectToFavorites(userId, id) + if err != nil { + return err + } + return nil +} + +func (p projectServiceImpl) DisfavorProject(ctx context.SecurityContext, id string) error { + exists, err := p.pRepo.Exists(id) + if err != nil { + return err + } + if !exists { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ProjectNotFound, + Message: exception.ProjectNotFoundMsg, + Params: map[string]interface{}{"projectId": id}, + } + } + userId := ctx.GetUserId() + favorite, err := p.favoritesRepo.IsFavoriteProject(userId, id) + if err != nil { + return err + } + if !favorite { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotFavored, + Message: exception.NotFavoredMsg, + Params: map[string]interface{}{"id": id, "user": userId}, + } + } + err = p.favoritesRepo.RemoveProjectFromFavorites(userId, id) + if err != nil { + return err + } + return nil +} + +func (p projectServiceImpl) getParentGroups(id string) ([]view.Group, error) { + groups, err := p.publishedRepo.GetParentPackageGroups(id) + if err != nil { + return nil, err + } + var result []view.Group + for _, grp := range groups { + result = append(result, *entity.MakePackageGroupView(&grp)) + } + return result, err +} diff --git a/qubership-apihub-service/service/PublishedService.go b/qubership-apihub-service/service/PublishedService.go new file mode 100644 index 0000000..1949f68 --- /dev/null +++ 
b/qubership-apihub-service/service/PublishedService.go @@ -0,0 +1,1630 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "archive/zip" + "bytes" + ctx "context" + "crypto/sha512" + "encoding/hex" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "sort" + "strconv" + "strings" + "time" + + "github.com/google/uuid" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/archive" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/service/validation" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type PublishedService interface { + GetPackageVersions(packageId string) (*view.PublishedVersions, error) + GetVersion(packageId string, versionName string, importFiles bool, dependFiles bool) (*view.PublishedVersion, error) + 
GetVersionSources(packageId string, versionName string) ([]byte, error) + GetPublishedVersionSourceDataConfig(packageId string, versionName string) (*view.PublishedVersionSourceDataConfig, error) + GetPublishedVersionBuildConfig(packageId string, versionName string) (*view.BuildConfig, error) + GetLatestContentData_to_delete(packageId string, versionName string, contentId string) (*view.PublishedContent, *view.ContentData, error) + GetLatestContentBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, error) + GetLatestContentDataBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, *view.ContentData, error) + PackagePublished(packageId string) (bool, error) + VersionPublished(packageId string, versionName string) (bool, error) + DeleteVersion(ctx context.SecurityContext, packageId string, versionName string) error + + GetSharedFile(shareId string) ([]byte, error) + SharePublishedFile(packageId string, versionName string, slug string) (*view.SharedUrlResult_deprecated, error) + GetFilteredPackages(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool) ([]view.Package, error) + GetPackagesByServiceName(ctx context.SecurityContext, serviceName string) ([]view.Package, error) + GetPackageById(ctx context.SecurityContext, id string) (*view.Package, error) + SaveBuildResult_deprecated(packageId string, archiveData []byte, publishId string, availableVersionStatuses []string) error + SaveBuildResult(packageId string, archiveData []byte, publishId string, availableVersionStatuses []string) error +} + +func NewPublishedService(branchService BranchService, + versionRepo repository.PublishedRepository, + projectsRepo repository.PrjGrpIntRepository, + buildRepository repository.BuildRepository, + gitClientProvider GitClientProvider, + websocketService WsBranchService, + favoritesRepo repository.FavoritesRepository, + operationRepo repository.OperationRepository, + atService 
ActivityTrackingService, + monitoringService MonitoringService, + minioStorageService MinioStorageService, + systemInfoService SystemInfoService) PublishedService { + return &publishedServiceImpl{ + branchService: branchService, + publishedRepo: versionRepo, + projectsRepo: projectsRepo, + buildRepository: buildRepository, + gitClientProvider: gitClientProvider, + websocketService: websocketService, + favoritesRepo: favoritesRepo, + operationRepo: operationRepo, + atService: atService, + monitoringService: monitoringService, + minioStorageService: minioStorageService, + systemInfoService: systemInfoService, + publishedValidator: validation.NewPublishedValidator(versionRepo), + } +} + +type publishedServiceImpl struct { + branchService BranchService + publishedRepo repository.PublishedRepository + projectsRepo repository.PrjGrpIntRepository + buildRepository repository.BuildRepository + gitClientProvider GitClientProvider + websocketService WsBranchService + favoritesRepo repository.FavoritesRepository + operationRepo repository.OperationRepository + atService ActivityTrackingService + monitoringService MonitoringService + minioStorageService MinioStorageService + systemInfoService SystemInfoService + publishedValidator validation.PublishedValidator +} + +func (p publishedServiceImpl) GetPackageVersions(packageId string) (*view.PublishedVersions, error) { + versions := make([]view.PublishedVersion, 0) + ents, err := p.publishedRepo.GetPackageVersions(packageId, "") + if err != nil { + return nil, err + } + for _, ent := range ents { + contentEnts, err := p.publishedRepo.GetRevisionContent(packageId, ent.Version, ent.Revision) + if err != nil { + return nil, err + } + refEnts, err := p.publishedRepo.GetRevisionRefs(packageId, ent.Version, ent.Revision) + if err != nil { + return nil, err + } + refViews, err := p.makeRefsView(refEnts) + if err != nil { + return nil, err + } + version := entity.MakePublishedVersionView(&ent, contentEnts, refViews) + versions = 
append(versions, *version) + } + return &view.PublishedVersions{Versions: versions}, nil +} + +func (p publishedServiceImpl) GetVersion(packageId string, versionName string, importFiles bool, dependFiles bool) (*view.PublishedVersion, error) { + ent, err := p.getLatestRevision(packageId, versionName) + if err != nil { + return nil, err + } + + filesEntsMap := make(map[string]entity.PublishedContentEntity) + packageFiles, err := p.publishedRepo.GetRevisionContent(packageId, versionName, ent.Revision) + if err != nil { + return nil, err + } + + for _, file := range packageFiles { + tmp := file + filesEntsMap[p.createRefFileId(file)] = tmp + } + + refEntsInRefBlock, refEntsForFileBlock, err := p.evaluateRefsTree(packageId, versionName, ent.Revision) + if err != nil { + return nil, err + } + + resultRefsViews, err := p.makeRefsView(refEntsInRefBlock) + if err != nil { + return nil, err + } + + sort.Slice(resultRefsViews, func(aIndex, bIndex int) bool { + a := resultRefsViews[aIndex] + b := resultRefsViews[bIndex] + aKey := fmt.Sprintf("%s@@%s@@%s", a.Kind, a.PackageId, a.Version) + bKey := fmt.Sprintf("%s@@%s@@%s", b.Kind, b.PackageId, b.Version) + return aKey < bKey + }) + + for _, ref := range refEntsForFileBlock { + refEnt, err := p.getLatestRevision(ref.RefPackageId, ref.RefVersion) + if err != nil { + return nil, err + } + + filesFromRefPackage, err := p.publishedRepo.GetRevisionContent(ref.RefPackageId, ref.RefVersion, refEnt.Revision) + if err != nil { + return nil, err + } + + for _, file := range filesFromRefPackage { + tmp := file + tmp.ReferenceId = ref.RefPackageId + filesEntsMap[p.createRefFileId(file)] = tmp + } + } + + resultFilesEnts := make([]entity.PublishedContentEntity, 0) + for _, file := range filesEntsMap { + tmp := file + resultFilesEnts = append(resultFilesEnts, tmp) + } + + sort.Slice(resultFilesEnts, func(aIndex, bIndex int) bool { + a := resultFilesEnts[aIndex] + b := resultFilesEnts[bIndex] + aKey := fmt.Sprintf("%s@@%v@@%s", a.ReferenceId, 
a.Index, a.Name) + bKey := fmt.Sprintf("%s@@%v@@%s", b.ReferenceId, b.Index, b.Name) + return aKey < bKey + }) + + version := entity.MakePublishedVersionView(ent, resultFilesEnts, resultRefsViews) + return version, nil +} + +func (p publishedServiceImpl) getLatestRevision(packageId string, versionName string) (*entity.PublishedVersionEntity, error) { + ent, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + + if ent == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + return ent, nil +} + +func (p publishedServiceImpl) evaluateRefsTree(packageId string, versionName string, revision int) ([]entity.PublishedReferenceEntity, []entity.PublishedReferenceEntity, error) { + refEnts, err := p.publishedRepo.GetRevisionRefs(packageId, versionName, revision) + if err != nil { + return nil, nil, err + } + allExpandedReferences := make(map[string]entity.PublishedReferenceContainer) + for _, val := range refEnts { + if err = p.expandRefNode(val, allExpandedReferences); err != nil { + return nil, nil, err + } + } + + refsBlock := make([]entity.PublishedReferenceEntity, 0) + filesBlock := make([]entity.PublishedReferenceEntity, 0) + + for _, container := range allExpandedReferences { + for _, ref := range container.References { + tmp := ref + filesBlock = append(filesBlock, tmp) + refsBlock = append(refsBlock, tmp) + } + } + return refsBlock, filesBlock, nil +} + +func (p publishedServiceImpl) expandRefNode(refNode entity.PublishedReferenceEntity, allReferences map[string]entity.PublishedReferenceContainer) error { + if p.addReferenceContainer(refNode, allReferences) { + ent, err := p.getLatestRevision(refNode.RefPackageId, refNode.RefVersion) + if err != nil { + return err + } + subReferences, err := 
p.publishedRepo.GetRevisionRefs(refNode.RefPackageId, refNode.RefVersion, ent.Revision) + if err != nil || subReferences == nil { + return err + } + + for _, subRef := range subReferences { + if err := p.expandRefNode(subRef, allReferences); err != nil { + return err + } + } + } + return nil +} + +func (p publishedServiceImpl) addReferenceContainer(ref entity.PublishedReferenceEntity, refMap map[string]entity.PublishedReferenceContainer) bool { + key := p.createRefId(ref) + if c, containerExists := refMap[key]; containerExists { + if _, refExists := c.References[ref.RefVersion]; refExists { + return false + } else { + c.References[ref.RefVersion] = ref + return true + } + } else { + container := entity.PublishedReferenceContainer{References: make(map[string]entity.PublishedReferenceEntity)} + container.References[ref.RefVersion] = ref + refMap[key] = container + return true + } +} + +func (p publishedServiceImpl) createRefId(ref entity.PublishedReferenceEntity) string { + return fmt.Sprintf("%s@@%s@@%s@@%s", ref.PackageId, ref.Version, ref.RefPackageId, ref.RefVersion) +} +func (p publishedServiceImpl) createRefFileId(refFile entity.PublishedContentEntity) string { + return fmt.Sprintf("%s@@%s@@%s@@%s", refFile.PackageId, refFile.Version, refFile.FileId, refFile.ReferenceId) +} + +func (p publishedServiceImpl) GetVersionSources(packageId string, versionName string) ([]byte, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + var srcArchive []byte + if p.systemInfoService.IsMinioStorageActive() && !p.systemInfoService.IsMinioStoreOnlyBuildResult() { + publishedSrc, err := p.publishedRepo.GetPublishedSources(packageId, version.Version, version.Revision) 
+ if err != nil { + return nil, err + } + if publishedSrc == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedSourcesDataNotFound, + Message: exception.PublishedSourcesDataNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + if publishedSrc.ArchiveChecksum != "" { + file, err := p.minioStorageService.GetFile(ctx.Background(), view.PUBLISHED_SOURCES_ARCHIVES_TABLE, publishedSrc.ArchiveChecksum) + if err != nil { + return nil, err + } + srcArchive = file + } + } else { + srcData, err := p.publishedRepo.GetVersionSources(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + if srcData == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedSourcesDataNotFound, + Message: exception.PublishedSourcesDataNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + if len(srcData.Data) <= 0 { + return nil, fmt.Errorf("failed to read sources archive for version: %v", version.Version) + } + srcArchive = srcData.Data + } + if srcArchive == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.SourcesNotFound, + Message: exception.SourcesNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + return srcArchive, nil +} + +func (p publishedServiceImpl) GetPublishedVersionSourceDataConfig(packageId string, versionName string) (*view.PublishedVersionSourceDataConfig, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + srcData := 
new(entity.PublishedSrcDataConfigEntity) + if p.systemInfoService.IsMinioStorageActive() && !p.systemInfoService.IsMinioStoreOnlyBuildResult() { + publishedSrc, err := p.publishedRepo.GetPublishedSources(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + if publishedSrc == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedSourcesDataNotFound, + Message: exception.PublishedSourcesDataNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + srcData = &entity.PublishedSrcDataConfigEntity{ + PackageId: packageId, + ArchiveChecksum: publishedSrc.ArchiveChecksum, + Config: publishedSrc.Config, + } + if publishedSrc.ArchiveChecksum != "" { + src, err := p.minioStorageService.GetFile(ctx.Background(), view.PUBLISHED_SOURCES_ARCHIVES_TABLE, publishedSrc.ArchiveChecksum) + if err != nil { + return nil, err + } + srcData.Data = src + } + } else { + srcData, err = p.publishedRepo.GetPublishedVersionSourceDataConfig(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + if srcData == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedSourcesDataNotFound, + Message: exception.PublishedSourcesDataNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + } + if srcData.Data == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.SourcesNotFound, + Message: exception.SourcesNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + + var buildConfig view.BuildConfig + err = json.Unmarshal(srcData.Config, &buildConfig) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal build config from sources: %v", err.Error()) + } + if len(buildConfig.Files)+len(buildConfig.Refs) == 0 { + return nil, fmt.Errorf("empty build 
config") + } + if len(srcData.Data) <= 0 { + return nil, fmt.Errorf("failed to read sources archive for version: %v", version.Version) + } + return &view.PublishedVersionSourceDataConfig{Config: buildConfig, Sources: srcData.Data}, nil +} + +func (p publishedServiceImpl) GetPublishedVersionBuildConfig(packageId string, versionName string) (*view.BuildConfig, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + publishedSrc, err := p.publishedRepo.GetPublishedSources(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + if publishedSrc == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedSourcesDataNotFound, + Message: exception.PublishedSourcesDataNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId, "versionName": versionName}, + } + } + + var buildConfig view.BuildConfig + err = json.Unmarshal(publishedSrc.Config, &buildConfig) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal build config from sources: %v", err.Error()) + } + return &buildConfig, nil +} + +func (p publishedServiceImpl) makeRefsView(ents []entity.PublishedReferenceEntity) ([]view.PublishedRef, error) { + result := make([]view.PublishedRef, 0) + for _, ent := range ents { + packageEnt, err := p.publishedRepo.GetPackage(ent.RefPackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageNotFound, + Message: exception.ReferencedPackageNotFoundMsg, + Params: map[string]interface{}{"package": ent.RefPackageId}, + } + } + + version, err := 
p.publishedRepo.GetVersion(ent.PackageId, ent.Version) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ent.PackageId, "version": ent.Version}, + } + } + result = append(result, view.PublishedRef{ + PackageId: ent.RefPackageId, + Version: ent.RefVersion, + Name: packageEnt.Name, + Alias: packageEnt.Alias, + VersionStatus: version.Status, + Kind: packageEnt.Kind, + }) + } + return result, nil +} + +func (p publishedServiceImpl) GetLatestContentData_to_delete(packageId string, versionName string, contentId string) (*view.PublishedContent, *view.ContentData, error) { + ent, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, nil, err + } + if ent == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := p.publishedRepo.GetLatestContent(packageId, versionName, contentId) + if err != nil { + return nil, nil, err + } + if content == nil { + return nil, nil, &exception.ContentNotFoundError{ContentId: contentId} + } + + pce, err := p.publishedRepo.GetContentData(packageId, content.Checksum) + if err != nil { + return nil, nil, err + } + if pce == nil { + return nil, nil, &exception.ContentNotFoundError{ContentId: contentId} + } + return entity.MakePublishedContentView(content), entity.MakeContentDataViewPub(content, pce), nil +} + +func (p publishedServiceImpl) GetLatestContentBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, error) { + ent, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if ent == nil { + return nil, 
&exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := p.publishedRepo.GetLatestContentBySlug(packageId, versionName, slug) + if err != nil { + return nil, err + } + if content == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + return entity.MakePublishedContentView(content), nil +} + +func (p publishedServiceImpl) GetLatestContentDataBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, *view.ContentData, error) { + ent, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, nil, err + } + if ent == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := p.publishedRepo.GetLatestContentBySlug(packageId, versionName, slug) + if err != nil { + return nil, nil, err + } + if content == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + + pce, err := p.publishedRepo.GetContentData(packageId, content.Checksum) + if err != nil { + return nil, nil, err + } + if pce == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + return entity.MakePublishedContentView(content), entity.MakeContentDataViewPub(content, pce), 
nil +} + +func (p publishedServiceImpl) PackagePublished(packageId string) (bool, error) { + ents, err := p.publishedRepo.GetPackageVersions(packageId, "") + if err != nil { + return false, err + } + return len(ents) > 0, nil +} + +func (p publishedServiceImpl) VersionPublished(packageId string, versionName string) (bool, error) { + ent, err := p.publishedRepo.GetVersionIncludingDeleted(packageId, versionName) + if err != nil { + return false, err + } + return ent != nil, nil +} + +func readZipFile(zf *zip.File) ([]byte, error) { + f, err := zf.Open() + if err != nil { + return nil, err + } + defer f.Close() + return ioutil.ReadAll(f) +} + +func (p publishedServiceImpl) DeleteVersion(ctx context.SecurityContext, packageId string, versionName string) error { + return p.publishedRepo.MarkVersionDeleted(packageId, versionName, ctx.GetUserId()) +} + +func (p publishedServiceImpl) SharePublishedFile(packageId string, versionName string, slug string) (*view.SharedUrlResult_deprecated, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := p.publishedRepo.GetLatestContentBySlug(packageId, versionName, slug) + if err != nil { + return nil, err + } + + if content == nil { + return nil, &exception.ContentNotFoundError{ContentId: slug} + } + + for attempts := 0; attempts < 100; attempts++ { + sharedIdInfoEntity, err := p.publishedRepo.GetFileSharedInfo(packageId, slug, versionName) + if err != nil { + return nil, err + } + if sharedIdInfoEntity != nil { + return entity.MakeSharedUrlInfo(sharedIdInfoEntity), nil + } + + newSharedUrlInfoEntity := &entity.SharedUrlInfoEntity{ + SharedId: generateSharedId(8), + PackageId: packageId, + Version: 
versionName, + FileId: slug, // TODO: Slug! + } + if err := p.publishedRepo.CreateFileSharedInfo(newSharedUrlInfoEntity); err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Code == exception.GeneratedSharedIdIsNotUnique { + continue + } else { + return nil, err + } + } + } else { + return entity.MakeSharedUrlInfo(newSharedUrlInfoEntity), nil + } + } + return nil, fmt.Errorf("failed to generate unique shared id") +} + +func (p publishedServiceImpl) GetSharedFile(sharedId string) ([]byte, error) { + sharedIdInfo, err := p.publishedRepo.GetFileSharedInfoById(sharedId) + if err != nil { + return nil, err + } + if sharedIdInfo == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.SharedIdIsIncorrect, + Message: exception.SharedIdIsIncorrectMsg, + Params: map[string]interface{}{"sharedId": sharedId}, + } + } + version, err := p.publishedRepo.GetVersionIncludingDeleted(sharedIdInfo.PackageId, sharedIdInfo.Version) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": sharedIdInfo.Version}, + } + } + if version.DeletedAt != nil && !version.DeletedAt.IsZero() { + return nil, &exception.CustomError{ + Status: http.StatusGone, + Code: exception.SharedContentUnavailable, + Message: exception.SharedContentUnavailableMsg, + Params: map[string]interface{}{"sharedId": sharedId}, + } + } + + content, err := p.publishedRepo.GetLatestContentBySlug(sharedIdInfo.PackageId, sharedIdInfo.Version, sharedIdInfo.FileId) + if err != nil { + return nil, err + } + if content == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoContentFoundForSharedId, + Message: exception.NoContentFoundForSharedIdMsg, + Params: map[string]interface{}{"sharedId": sharedId}, + 
} + } + + pce, err := p.publishedRepo.GetContentData(content.PackageId, content.Checksum) + if err != nil { + return nil, err + } + if pce == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoContentFoundForSharedId, + Message: exception.NoContentFoundForSharedIdMsg, + Params: map[string]interface{}{"sharedId": sharedId}, + } + } + return pce.Data, nil +} + +func (p publishedServiceImpl) GetFilteredPackages(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool) ([]view.Package, error) { + result := make([]view.Package, 0) + entities, err := p.publishedRepo.GetFilteredPackages(filter, groupId) + if err != nil { + return nil, err + } + var groups []view.Group + if groupId != "" { + groups, err = p.getParentGroups(groupId) + if err != nil { + return nil, err + } + } + for _, ent := range entities { + if groupId == "" { + groups, err = p.getParentGroups(ent.ParentId) + if err != nil { + return nil, err + } + } + isFavorite, err := p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), ent.Id) + if err != nil { + return nil, err + } + versionEntity, err := p.publishedRepo.GetLastVersion(ent.Id) + if err != nil { + return nil, err + } + if versionEntity != nil { + ent.LastVersion = versionEntity.Version + } + + packageView := *entity.MakePackageView(&ent, isFavorite, groups) + + if !onlyFavorite || (onlyFavorite && isFavorite) { // TODO: need to handle via repository + result = append(result, packageView) + } + } + //todo paging + return result, nil +} + +func (p publishedServiceImpl) GetPackagesByServiceName(ctx context.SecurityContext, serviceName string) ([]view.Package, error) { + result := make([]view.Package, 0) + packageEnt, err := p.publishedRepo.GetPackageForServiceName(serviceName) + if err != nil { + return nil, err + } + if packageEnt != nil { + groups, err := p.getParentGroups(packageEnt.ParentId) + if err != nil { + return nil, err + } + isFavorite, err := 
p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), packageEnt.Id) + if err != nil { + return nil, err + } + versionEntity, err := p.publishedRepo.GetLastVersion(packageEnt.Id) + if err != nil { + return nil, err + } + if versionEntity != nil { + packageEnt.LastVersion = versionEntity.Version + } + + packageView := *entity.MakePackageView(packageEnt, isFavorite, groups) + + result = append(result, packageView) + } + return result, nil +} + +func (p publishedServiceImpl) GetPackageById(ctx context.SecurityContext, id string) (*view.Package, error) { + packageEnt, err := p.publishedRepo.GetPackage(id) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": id}, + } + } + groups, err := p.getParentGroups(packageEnt.Id) + if err != nil { + return nil, err + } + isFavorite, err := p.favoritesRepo.IsFavoritePackage(ctx.GetUserId(), packageEnt.Id) + if err != nil { + return nil, err + } + versionEntity, err := p.publishedRepo.GetLastVersion(packageEnt.Id) + if err != nil { + return nil, err + } + if versionEntity != nil { + packageEnt.LastVersion = versionEntity.Version + } + + packageView := entity.MakePackageView(packageEnt, isFavorite, groups) + return packageView, nil +} + +func (p publishedServiceImpl) getParentGroups(groupId string) ([]view.Group, error) { + groups, err := p.publishedRepo.GetParentPackageGroups(groupId) + if err != nil { + return nil, err + } + var result []view.Group + for _, grp := range groups { + result = append(result, *entity.MakePackageGroupView(&grp)) + } + return result, err +} + +func validatePublishSources(filesFromSourcesArchive map[string]struct{}, filesFromConfig []view.BCFile) error { + for _, fileFromConfig := range filesFromConfig { + if _, exists := filesFromSourcesArchive[fileFromConfig.FileId]; !exists { + return 
// publishPackage persists a fully built package version: documents,
// operations, comparisons, builder notifications, sources and references.
//
// The flow is: read the build-result zip sections, validate the package and
// its sources, resolve version/revision numbers, convert archive content to
// DB entities, store everything via CreateVersionWithData, then run
// post-publish work (operation group recalculation, changelog rebuilds,
// metrics and activity tracking). Statement order matters: revision
// resolution mutates buildArc.PackageInfo in place before entities are built.
func (p publishedServiceImpl) publishPackage(buildArc *archive.BuildResultArchive, buildSrcEnt *entity.BuildSourceEntity,
	buildConfig *view.BuildConfig, existingPackage *entity.PackageEntity) error {

	publishStart := time.Now()
	start := time.Now()
	// Read all zip sections up front; each Read* call parses one part of the
	// build-result archive.
	err := buildArc.ReadPackageDocuments(false)
	if err != nil {
		return err
	}
	err = buildArc.ReadPackageComparisons(false)
	if err != nil {
		return err
	}
	err = buildArc.ReadPackageOperations(false)
	if err != nil {
		return err
	}
	err = buildArc.ReadBuilderNotifications(false)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 400, "publishPackage: zip files read")

	start = time.Now()
	if err = p.publishedValidator.ValidatePackage(buildArc, buildConfig); err != nil {
		return err
	}
	log.Debugf("Publishing package with packageId: %v; version: %v", buildArc.PackageInfo.PackageId, buildArc.PackageInfo.Version)
	if err = validation.ValidatePublishBuildResult(buildArc); err != nil {
		return err
	}

	// Collect the file names present in the sources archive and verify the
	// config does not reference files missing from it.
	checksumMap := make(map[string]struct{}, 0)
	if len(buildSrcEnt.Source) > 0 {
		origReader, err := zip.NewReader(bytes.NewReader(buildSrcEnt.Source), int64(len(buildSrcEnt.Source)))
		if err != nil {
			return fmt.Errorf("failed to read src zip, err: %w", err)
		}
		for _, fl := range origReader.File {
			checksumMap[fl.Name] = struct{}{}
		}
	}
	err = validatePublishSources(checksumMap, buildConfig.Files)
	if err != nil {
		return err
	}

	utils.PerfLog(time.Since(start).Milliseconds(), 200, "publishPackage: validate publishing package")

	start = time.Now()
	// Split "<version>@<revision>" form; revision 0 means "not specified".
	buildArc.PackageInfo.Version, buildArc.PackageInfo.Revision, err = SplitVersionRevision(buildArc.PackageInfo.Version)
	if err != nil {
		return err
	}
	if buildArc.PackageInfo.Revision == 0 {
		// No explicit revision: next revision of an existing version, or 1.
		buildArc.PackageInfo.Revision = 1
		storedVersion, err := p.publishedRepo.GetVersionIncludingDeleted(buildArc.PackageInfo.PackageId, buildArc.PackageInfo.Version)
		if err != nil {
			return err
		}
		if storedVersion != nil {
			buildArc.PackageInfo.Revision = storedVersion.Revision + 1
		}
	}

	buildArc.PackageInfo.PreviousVersion, buildArc.PackageInfo.PreviousVersionRevision, err = SplitVersionRevision(buildArc.PackageInfo.PreviousVersion)
	if err != nil {
		return err
	}
	previousVersionRevision := buildArc.PackageInfo.PreviousVersionRevision
	if previousVersionRevision == 0 {
		if buildArc.PackageInfo.PreviousVersion != "" {
			// Previous version may live in another package.
			previousVersionPackageId := buildArc.PackageInfo.PackageId
			if buildArc.PackageInfo.PreviousVersionPackageId != "" {
				previousVersionPackageId = buildArc.PackageInfo.PreviousVersionPackageId
			}
			previousVersionEnt, err := p.publishedRepo.GetVersionIncludingDeleted(previousVersionPackageId, buildArc.PackageInfo.PreviousVersion)
			if err != nil {
				return err
			}
			if previousVersionEnt == nil {
				return &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.PublishedPackageVersionNotFound,
					Message: exception.PublishedPackageVersionNotFoundMsg,
					Params:  map[string]interface{}{"version": buildArc.PackageInfo.PreviousVersion, "packageId": previousVersionPackageId},
				}
			}
			// NOTE(review): previousVersionRevision is assigned here but never
			// read afterwards — this lookup effectively only validates that the
			// previous version exists. Confirm whether the revision was meant
			// to be written back to buildArc.PackageInfo.
			previousVersionRevision = previousVersionEnt.Revision
		}
	}

	refEntities, err := p.makePublishedReferencesEntities(buildArc.PackageInfo, buildArc.PackageInfo.Refs)
	if err != nil {
		return err
	}

	// Convert archive content into DB entities.
	buildArcEntitiesReader := archive.NewBuildResultToEntitiesReader(buildArc)

	fileEntities, fileDataEntities, err := buildArcEntitiesReader.ReadDocumentsToEntities()
	if err != nil {
		return err
	}

	operationEntities, operationDataEntities, err := buildArcEntitiesReader.ReadOperationsToEntities()
	if err != nil {
		return err
	}

	operationsComparisonEntities, changedOperationEntities, versionComparisonsFromCache, err := buildArcEntitiesReader.ReadOperationComparisonsToEntities()
	if err != nil {
		return err
	}

	builderNotificationsEntities := buildArcEntitiesReader.ReadBuilderNotificationsToEntities(buildSrcEnt.BuildId)

	var publishedSrcEntity *entity.PublishedSrcEntity
	var publishedSrcArchiveEntity *entity.PublishedSrcArchiveEntity

	cfgBytes, err := json.Marshal(buildSrcEnt.Config)
	if err != nil {
		return err
	}

	// Per-file metadata is merged and stored as one JSON document keyed by
	// file id.
	metadataByFile := map[string]interface{}{}
	for _, fileEnt := range fileEntities {
		merged := entity.Metadata{}
		merged.MergeMetadata(fileEnt.Metadata)
		metadataByFile[fileEnt.FileId] = merged
	}
	mdBytes, err := json.Marshal(metadataByFile)
	if err != nil {
		return err
	}

	// The sources archive is addressed by its SHA-512 checksum.
	archiveCS := sha512.Sum512(buildSrcEnt.Source)
	archiveCSStr := hex.EncodeToString(archiveCS[:])

	// create sources entities
	publishedSrcEntity = &entity.PublishedSrcEntity{
		PackageId:       buildArc.PackageInfo.PackageId,
		Version:         buildArc.PackageInfo.Version,
		Revision:        buildArc.PackageInfo.Revision,
		Metadata:        mdBytes,
		Config:          cfgBytes,
		ArchiveChecksum: archiveCSStr,
	}
	// Sources archive goes either to minio or to the DB, never both.
	if p.systemInfoService.IsMinioStorageActive() && !p.systemInfoService.IsMinioStoreOnlyBuildResult() {
		minioUploadStart := time.Now()
		err = p.minioStorageService.UploadFile(ctx.Background(), view.PUBLISHED_SOURCES_ARCHIVES_TABLE, archiveCSStr, buildSrcEnt.Source)
		if err != nil {
			return err
		}
		utils.PerfLog(time.Since(minioUploadStart).Milliseconds(), 100, "publishPackage: upload sources to minio")
	} else {
		publishedSrcArchiveEntity = &entity.PublishedSrcArchiveEntity{
			Checksum: archiveCSStr,
			Data:     buildSrcEnt.Source,
		}
	}

	// Lift well-known keys from the package metadata into version metadata.
	versionLabels := make([]string, 0)
	versionMetadata := entity.Metadata{}
	var packageMetadata entity.Metadata
	packageMetadata = buildArc.PackageInfo.Metadata
	if len(packageMetadata) > 0 {
		versionLabels = packageMetadata.GetStringArray("versionLabels")
		branchName := packageMetadata.GetStringValue("branchName")
		if branchName != "" {
			versionMetadata.SetBranchName(branchName)
		}
		commitId := packageMetadata.GetStringValue("commitId")
		if commitId != "" {
			versionMetadata.SetCommitId(commitId)
		}
		repositoryUrl := packageMetadata.GetStringValue("repositoryUrl")
		if repositoryUrl != "" {
			versionMetadata.SetRepositoryUrl(repositoryUrl)
		}
		namespace := packageMetadata.GetStringValue("namespace")
		if namespace != "" {
			versionMetadata.SetNamespace(namespace)
		}
		cloudUrl := packageMetadata.GetStringValue("cloudUrl")
		if cloudUrl != "" {
			versionMetadata.SetCloudUrl(cloudUrl)
		}
		cloudName := packageMetadata.GetStringValue("cloudName")
		if cloudName != "" {
			versionMetadata.SetCloudName(cloudName)
		}
	}

	if buildArc.PackageInfo.BuilderVersion != "" {
		versionMetadata.SetBuilderVersion(buildArc.PackageInfo.BuilderVersion)
	}

	// Migration builds may carry their original publication timestamp.
	publishedAt := time.Now()
	if buildArc.PackageInfo.MigrationBuild && buildArc.PackageInfo.PublishedAt != nil &&
		!buildArc.PackageInfo.PublishedAt.IsZero() {
		publishedAt = *buildArc.PackageInfo.PublishedAt
	}
	versionEnt := &entity.PublishedVersionEntity{
		PackageId:                buildArc.PackageInfo.PackageId,
		Version:                  buildArc.PackageInfo.Version,
		PreviousVersion:          buildArc.PackageInfo.PreviousVersion,
		PreviousVersionPackageId: buildArc.PackageInfo.PreviousVersionPackageId,
		Revision:                 buildArc.PackageInfo.Revision,
		Status:                   buildArc.PackageInfo.Status,
		PublishedAt:              publishedAt,
		DeletedAt:                nil,
		Metadata:                 versionMetadata,
		Labels:                   versionLabels,
		CreatedBy:                buildArc.PackageInfo.CreatedBy,
	}

	// A service name may be claimed exactly once per package; an already-set
	// differing name is rejected.
	newServiceName := ""
	if buildConfig.ServiceName != "" && (existingPackage.Kind == entity.KIND_PACKAGE || existingPackage.Kind == entity.KIND_DASHBOARD) {
		if existingPackage.ServiceName == "" {
			serviceOwner, err := p.publishedRepo.GetServiceOwner(utils.GetPackageWorkspaceId(existingPackage.Id), buildConfig.ServiceName)
			if err != nil {
				return fmt.Errorf("failed to check service owner: %v", err.Error())
			}
			if serviceOwner == "" {
				newServiceName = buildConfig.ServiceName
			}
		} else if buildConfig.ServiceName == existingPackage.ServiceName {
			newServiceName = ""
		} else {
			return &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.ServiceNameCantBeModified,
				Message: exception.ServiceNameCantBeModifiedMsg,
			}
		}
	}

	utils.PerfLog(time.Since(start).Milliseconds(), 200, "publishPackage: make all version entities")

	start = time.Now()
	versionCreationStart := time.Now()
	// Single transactional write of the whole version.
	err = p.publishedRepo.CreateVersionWithData(
		buildArc.PackageInfo,
		buildSrcEnt.BuildId,
		versionEnt,
		fileEntities,
		fileDataEntities,
		refEntities,
		publishedSrcEntity,
		publishedSrcArchiveEntity,
		operationEntities,
		operationDataEntities,
		changedOperationEntities,
		builderNotificationsEntities,
		operationsComparisonEntities,
		newServiceName,
		existingPackage,
		versionComparisonsFromCache,
	)
	utils.PerfLog(time.Since(start).Milliseconds(), 15000, "publishPackage: CreateVersionWithData")
	if err != nil {
		return err
	}

	log.Debugf("Version creation time: %v", time.Since(versionCreationStart).Milliseconds())

	start = time.Now()
	//todo move this recalculation inside publish method to run in the same transaction (after publish method redesign)
	// Best-effort: a failure here is logged, not returned.
	err = p.publishedRepo.RecalculateOperationGroups(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, view.MakePackageGroupingPrefixRegex(existingPackage.RestGroupingPrefix), "", versionEnt.CreatedBy)
	if err != nil {
		log.Errorf("failed to calculate operations groups for version: %+v: %v", versionEnt, err.Error())
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 50, "publishPackage: operations groups calculation")

	// Post-publish side effects are skipped for migration builds.
	if !buildArc.PackageInfo.MigrationBuild {
		if versionEnt.Status == string(view.Release) {
			p.monitoringService.IncreaseBusinessMetricCounter(buildArc.PackageInfo.CreatedBy, metrics.ReleaseVersionsPublished, versionEnt.PackageId)
		}
		err = p.reCalculateChangelogs(buildArc.PackageInfo)
		if err != nil {
			return err
		}
		dataMap := map[string]interface{}{}
		dataMap["version"] = versionEnt.Version
		dataMap["status"] = versionEnt.Status

		var eventType view.ATEventType
		if buildArc.PackageInfo.Revision > 1 {
			eventType = view.ATETPublishNewRevision
		} else {
			eventType = view.ATETPublishNewVersion
		}
		dataMap["revision"] = buildArc.PackageInfo.Revision

		p.atService.TrackEvent(view.ActivityTrackingEvent{
			Type:      eventType,
			Data:      dataMap,
			PackageId: versionEnt.PackageId,
			Date:      time.Now(),
			UserId:    versionEnt.CreatedBy,
		})
	}

	utils.PerfLog(time.Since(publishStart).Milliseconds(), 10000, "publishPackage: total package publishing")
	return nil
}
// SaveBuildResult_deprecated validates an uploaded build-result archive and
// dispatches it to the publish flow matching its build type.
//
// Deprecated: superseded by SaveBuildResult. This variant additionally
// accepts view.DocumentGroupType_deprecated builds (see the case comment
// below about node-service).
//
// availableVersionStatuses lists the version statuses the caller is allowed
// to publish; a build with a status outside this list is rejected unless it
// is a migration build.
func (p publishedServiceImpl) SaveBuildResult_deprecated(packageId string, archiveData []byte, publishId string, availableVersionStatuses []string) error {
	// Update last active time to make sure that the build won't be restarted. Assuming that publication will take < 30 seconds!
	// TODO: another option could be different status like "result_processing" for such builds
	err := p.buildRepository.UpdateBuildStatus(publishId, view.StatusRunning, "")
	if err != nil {
		// Best-effort: failure to refresh the status is logged, not fatal.
		log.Errorf("Failed refresh last active time before publication for build %s with err: %s", publishId, err)
	}

	start := time.Now()
	zipReader, err := zip.NewReader(bytes.NewReader(archiveData), int64(len(archiveData)))
	if err != nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackageArchive,
			Message: exception.InvalidPackageArchiveMsg,
			Params:  map[string]interface{}{"error": err.Error()},
		}
	}

	buildArc := archive.NewBuildResultArchive(zipReader)
	if err := buildArc.ReadPackageInfo(); err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 50, "SaveBuildResult: archive parsing")

	// The archive must target the package named in the request path.
	if buildArc.PackageInfo.PackageId != packageId {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params: map[string]interface{}{
				"file":  "info",
				"error": fmt.Sprintf("packageId:%v provided by %v doesn't match packageId:%v requested in path", buildArc.PackageInfo.PackageId, archive.InfoFilePath, packageId),
			},
		}
	}

	start = time.Now()
	buildSrcEnt, err := p.buildRepository.GetBuildSrc(publishId)
	if err != nil {
		return fmt.Errorf("failed to get build src with err: %w", err)
	}
	if buildSrcEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.BuildSourcesNotFound,
			Message: exception.BuildSourcesNotFoundMsg,
			Params:  map[string]interface{}{"publishId": publishId},
		}
	}

	buildConfig, err := view.BuildConfigFromMap(buildSrcEnt.Config, publishId)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 200, "SaveBuildResult: get build src")

	start = time.Now()
	err = p.publishedValidator.ValidateBuildResultAgainstConfig(buildArc, buildConfig)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 100, "SaveBuildResult: ValidateBuildResultAgainstConfig")

	start = time.Now()
	existingPackage, err := p.publishedRepo.GetPackage(buildArc.PackageInfo.PackageId)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 100, "SaveBuildResult: get existing package")
	if existingPackage == nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params:  map[string]interface{}{"file": "info", "error": fmt.Sprintf("package with packageId = '%v' doesn't exist", buildArc.PackageInfo.PackageId)},
		}
	}
	buildArc.PackageInfo.Kind = existingPackage.Kind
	//todo zip check for unknown files

	// Dispatch by build type.
	switch buildArc.PackageInfo.BuildType {
	case view.BuildType:
		sufficientPrivileges := utils.SliceContains(availableVersionStatuses, buildArc.PackageInfo.Status)
		if !sufficientPrivileges && !buildArc.PackageInfo.MigrationBuild {
			return &exception.CustomError{
				Status:  http.StatusForbidden,
				Code:    exception.InsufficientPrivileges,
				Message: exception.InsufficientPrivilegesMsg,
			}
		}
		return p.publishPackage(buildArc, buildSrcEnt, buildConfig, existingPackage)
	//support view.ReducedSourceSpecificationsType type because of node-service that is not yet ready for v3 publish
	//we need view.ReducedSourceSpecificationsType build on node-service for operation group publication
	case view.DocumentGroupType_deprecated, view.ReducedSourceSpecificationsType:
		return p.publishTransformedDocuments(buildArc, publishId)
	case view.ChangelogType:
		return p.publishChanges(buildArc, publishId)
	default:
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.UnknownBuildType,
			Message: exception.UnknownBuildTypeMsg,
			Params:  map[string]interface{}{"type": buildArc.PackageInfo.BuildType},
		}
	}
}
// SaveBuildResult validates an uploaded build-result archive and dispatches
// it to the publish flow matching its build type (full publish, changelog, or
// transformed documents).
//
// availableVersionStatuses lists the version statuses the caller is allowed
// to publish; a build with a status outside this list is rejected unless it
// is a migration build.
func (p publishedServiceImpl) SaveBuildResult(packageId string, archiveData []byte, publishId string, availableVersionStatuses []string) error {
	// Update last active time to make sure that the build won't be restarted. Assuming that publication will take < 30 seconds!
	// TODO: another option could be different status like "result_processing" for such builds
	err := p.buildRepository.UpdateBuildStatus(publishId, view.StatusRunning, "")
	if err != nil {
		// Best-effort: failure to refresh the status is logged, not fatal.
		log.Errorf("Failed refresh last active time before publication for build %s with err: %s", publishId, err)
	}

	start := time.Now()
	zipReader, err := zip.NewReader(bytes.NewReader(archiveData), int64(len(archiveData)))
	if err != nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackageArchive,
			Message: exception.InvalidPackageArchiveMsg,
			Params:  map[string]interface{}{"error": err.Error()},
		}
	}

	buildArc := archive.NewBuildResultArchive(zipReader)
	if err := buildArc.ReadPackageInfo(); err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 50, "SaveBuildResult: archive parsing")

	// The archive must target the package named in the request path.
	if buildArc.PackageInfo.PackageId != packageId {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params: map[string]interface{}{
				"file":  "info",
				"error": fmt.Sprintf("packageId:%v provided by %v doesn't match packageId:%v requested in path", buildArc.PackageInfo.PackageId, archive.InfoFilePath, packageId),
			},
		}
	}

	start = time.Now()
	buildSrcEnt, err := p.buildRepository.GetBuildSrc(publishId)
	if err != nil {
		return fmt.Errorf("failed to get build src with err: %w", err)
	}
	if buildSrcEnt == nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.BuildSourcesNotFound,
			Message: exception.BuildSourcesNotFoundMsg,
			Params:  map[string]interface{}{"publishId": publishId},
		}
	}

	buildConfig, err := view.BuildConfigFromMap(buildSrcEnt.Config, publishId)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 200, "SaveBuildResult: get build src")

	start = time.Now()
	err = p.publishedValidator.ValidateBuildResultAgainstConfig(buildArc, buildConfig)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 100, "SaveBuildResult: ValidateBuildResultAgainstConfig")

	start = time.Now()
	existingPackage, err := p.publishedRepo.GetPackage(buildArc.PackageInfo.PackageId)
	if err != nil {
		return err
	}
	utils.PerfLog(time.Since(start).Milliseconds(), 100, "SaveBuildResult: get existing package")
	if existingPackage == nil {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params:  map[string]interface{}{"file": "info", "error": fmt.Sprintf("package with packageId = '%v' doesn't exist", buildArc.PackageInfo.PackageId)},
		}
	}
	buildArc.PackageInfo.Kind = existingPackage.Kind
	//todo zip check for unknown files

	// Dispatch by build type.
	switch buildArc.PackageInfo.BuildType {
	case view.BuildType:
		sufficientPrivileges := utils.SliceContains(availableVersionStatuses, buildArc.PackageInfo.Status)
		if !sufficientPrivileges && !buildArc.PackageInfo.MigrationBuild {
			return &exception.CustomError{
				Status:  http.StatusForbidden,
				Code:    exception.InsufficientPrivileges,
				Message: exception.InsufficientPrivilegesMsg,
			}
		}
		return p.publishPackage(buildArc, buildSrcEnt, buildConfig, existingPackage)
	case view.ChangelogType:
		return p.publishChanges(buildArc, publishId)
	case view.ReducedSourceSpecificationsType, view.MergedSpecificationType:
		return p.publishTransformedDocuments(buildArc, publishId)
	default:
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.UnknownBuildType,
			Message: exception.UnknownBuildTypeMsg,
			Params:  map[string]interface{}{"type": buildArc.PackageInfo.BuildType},
		}
	}
}
exception.UnknownBuildTypeMsg, + Params: map[string]interface{}{"type": buildArc.PackageInfo.BuildType}, + } + } +} + +func (p publishedServiceImpl) makePublishedReferencesEntities(packageInfo view.PackageInfoFile, packageRefs []view.BCRef) ([]*entity.PublishedReferenceEntity, error) { + uniqueRefs := make(map[string]struct{}, 0) + publishedReferences := make([]*entity.PublishedReferenceEntity, 0) + for _, ref := range packageRefs { + refVersion, err := p.publishedRepo.GetVersionIncludingDeleted(ref.RefId, ref.Version) + if err != nil { + return nil, err + } + if refVersion == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ref.RefId, "version": ref.Version}, + } + } + refEntity := &entity.PublishedReferenceEntity{ + PackageId: packageInfo.PackageId, + Version: packageInfo.Version, + Revision: packageInfo.Revision, + RefPackageId: refVersion.PackageId, + RefVersion: refVersion.Version, + RefRevision: refVersion.Revision, + Excluded: ref.Excluded, + } + if ref.ParentRefId != "" { + parentRefVersion, err := p.publishedRepo.GetVersionIncludingDeleted(ref.ParentRefId, ref.ParentVersion) + if err != nil { + return nil, err + } + if parentRefVersion == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ref.ParentRefId, "version": ref.ParentVersion}, + } + } + refEntity.ParentRefPackageId = parentRefVersion.PackageId + refEntity.ParentRefVersion = parentRefVersion.Version + refEntity.ParentRefRevision = parentRefVersion.Revision + } + + refEntityKey := makePublishedReferenceUniqueKey(refEntity) + if _, exists := uniqueRefs[refEntityKey]; exists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, 
+ Code: exception.DuplicateReference, + Message: exception.DuplicateReferenceMsg, + Params: map[string]interface{}{"refId": ref.RefId, "refVersion": ref.Version}, + } + } + uniqueRefs[refEntityKey] = struct{}{} + publishedReferences = append(publishedReferences, refEntity) + } + return publishedReferences, nil +} + +func makePublishedReferenceUniqueKey(entity *entity.PublishedReferenceEntity) string { + return fmt.Sprintf(`%v|@@|%v|@@|%v|@@|%v|@@|%v|@@|%v`, entity.RefPackageId, entity.RefVersion, entity.RefRevision, entity.ParentRefPackageId, entity.ParentRefVersion, entity.ParentRefRevision) +} + +func (p publishedServiceImpl) reCalculateChangelogs(packageInfo view.PackageInfoFile) error { + versions, err := p.publishedRepo.GetVersionsByPreviousVersion(packageInfo.PackageId, packageInfo.Version) + if err != nil { + return err + } + var buildConfig view.BuildConfig + for _, version := range versions { + buildConfig = view.BuildConfig{ + PackageId: version.PackageId, + Version: version.Version, + PreviousVersion: version.PreviousVersion, + PreviousVersionPackageId: version.PreviousVersionPackageId, + BuildType: view.ChangelogType, + CreatedBy: packageInfo.CreatedBy, + PublishedAt: time.Now(), + } + err := p.createChangelogBuild(buildConfig) + if err != nil { + return err + } + } + return nil +} + +func (p publishedServiceImpl) publishChanges(buildArc *archive.BuildResultArchive, publishId string) error { + var err error + if err = buildArc.ReadPackageComparisons(false); err != nil { + return err + } + + if err = validation.ValidatePublishBuildResult(buildArc); err != nil { + return err + } + + operationChangesCreationStart := time.Now() + buildArc.PackageInfo.Version, buildArc.PackageInfo.Revision, err = SplitVersionRevision(buildArc.PackageInfo.Version) + if err != nil { + return err + } + buildArc.PackageInfo.PreviousVersion, buildArc.PackageInfo.PreviousVersionRevision, err = SplitVersionRevision(buildArc.PackageInfo.PreviousVersion) + if err != nil { + return 
err + } + if err := p.publishedValidator.ValidateChanges(buildArc); err != nil { + return err + } + if len(buildArc.PackageComparisons.Comparisons) == 0 { + return nil + } + + buildArcEntitiesReader := archive.NewBuildResultToEntitiesReader(buildArc) + versionComparisonEntities, operationComparisonEntities, versionComparisonsFromCache, err := buildArcEntitiesReader.ReadOperationComparisonsToEntities() + if err != nil { + return err + } + + err = p.publishedRepo.SaveVersionChanges(buildArc.PackageInfo, publishId, operationComparisonEntities, versionComparisonEntities, versionComparisonsFromCache) + if err != nil { + return err + } + log.Debugf("Operation changes creation time: %v", time.Since(operationChangesCreationStart).Milliseconds()) + return nil +} + +func (p publishedServiceImpl) publishTransformedDocuments(buildArc *archive.BuildResultArchive, publishId string) error { + var err error + if err = buildArc.ReadPackageDocuments(true); err != nil { + return err + } + if err = validation.ValidatePublishBuildResult(buildArc); err != nil { + return err + } + buildArc.PackageInfo.Version, buildArc.PackageInfo.Revision, err = SplitVersionRevision(buildArc.PackageInfo.Version) + if err != nil { + return err + } + + buildArcEntitiesReader := archive.NewBuildResultToEntitiesReader(buildArc) + transformedDocumentsEntity, err := buildArcEntitiesReader.ReadTransformedDocumentsToEntity() + if err != nil { + return err + } + return p.publishedRepo.SaveTransformedDocument(transformedDocumentsEntity, publishId) +} + +func SplitVersionRevision(version string) (string, int, error) { + if !strings.Contains(version, "@") { + return version, 0, nil + } + versionSplit := strings.Split(version, "@") + if len(versionSplit) != 2 { + return "", -1, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidRevisionFormat, + Message: exception.InvalidRevisionFormatMsg, + Params: map[string]interface{}{"version": version}, + } + } + versionName := versionSplit[0] + 
versionRevisionStr := versionSplit[1] + versionRevision, err := strconv.Atoi(versionRevisionStr) + if err != nil { + return "", -1, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidRevisionFormat, + Message: exception.InvalidRevisionFormatMsg, + Params: map[string]interface{}{"version": version}, + Debug: err.Error(), + } + } + if versionRevision <= 0 { + return "", -1, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidRevisionFormat, + Message: exception.InvalidRevisionFormatMsg, + Params: map[string]interface{}{"version": version}, + } + } + return versionName, versionRevision, nil +} + +func (p publishedServiceImpl) createChangelogBuild(config view.BuildConfig) error { //todo folder refactoring is needed. Use buildService.CreateChangelogBuild() after it + status := view.StatusNotStarted + + buildId := config.PublishId + if buildId == "" { + buildId = uuid.New().String() + } + + buildEnt := entity.BuildEntity{ + BuildId: buildId, + Status: string(status), + Details: "", + + PackageId: config.PackageId, + Version: config.Version, + + CreatedBy: config.CreatedBy, + RestartCount: 0, + Priority: -1, + } + + confAsMap, err := view.BuildConfigToMap(config) + if err != nil { + return err + } + + sourceEnt := entity.BuildSourceEntity{ + BuildId: buildEnt.BuildId, + Config: *confAsMap, + } + + err = p.buildRepository.StoreBuild(buildEnt, sourceEnt, nil) + if err != nil { + return err + } + return nil +} diff --git a/qubership-apihub-service/service/RefResolverService.go b/qubership-apihub-service/service/RefResolverService.go new file mode 100644 index 0000000..7942bfc --- /dev/null +++ b/qubership-apihub-service/service/RefResolverService.go @@ -0,0 +1,165 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// RefResolverService normalizes and expands the references of a build config.
type RefResolverService interface {
	// CalculateBuildConfigRefs validates every reference, pins each version
	// to a concrete revision, optionally expands transitive references
	// (resolveRefs) and optionally marks conflicting duplicate package
	// references as excluded instead of failing (resolveConflicts).
	CalculateBuildConfigRefs(refs []view.BCRef, resolveRefs bool, resolveConflicts bool) ([]view.BCRef, error)
}

// NewRefResolverService creates a RefResolverService backed by the given
// published-versions repository.
func NewRefResolverService(publishedRepo repository.PublishedRepository) RefResolverService {
	return &refResolverServiceImpl{
		publishedRepo: publishedRepo,
	}
}

type refResolverServiceImpl struct {
	publishedRepo repository.PublishedRepository
}

// CalculateBuildConfigRefs runs in up to two passes over refs, mutating each
// element in place:
//
//  1. (only when resolveRefs) resolve every ref, pin its version to
//     "<version>@<revision>" form, and append the child references reported
//     by the repository for that version. Note that `for i := range refs`
//     captures the slice length once, so appended children are NOT revisited
//     by this pass — they are only validated by pass 2.
//  2. re-resolve and pin every ref (original + appended), rejecting exact
//     duplicates, and either excluding or rejecting repeated references to
//     the same package depending on resolveConflicts.
func (r *refResolverServiceImpl) CalculateBuildConfigRefs(refs []view.BCRef, resolveRefs bool, resolveConflicts bool) ([]view.BCRef, error) {
	// NOTE(review): validRefs is populated below but never read — it looks
	// like dead state; confirm whether it was meant to feed the second pass.
	validRefs := make(map[string]struct{}, 0)
	if resolveRefs {
		uniqueRefs := make(map[string]struct{}, 0)
		for i := range refs {
			ref := &refs[i]
			versionEnt, err := r.publishedRepo.GetVersion(ref.RefId, ref.Version)
			if err != nil {
				return nil, err
			}
			if versionEnt == nil {
				return nil, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.ReferencedPackageVersionNotFound,
					Message: exception.ReferencedPackageVersionNotFoundMsg,
					Params:  map[string]interface{}{"package": ref.RefId, "version": ref.Version},
				}
			}
			if ref.ParentRefId != "" {
				parentVersionEnt, err := r.publishedRepo.GetVersion(ref.ParentRefId, ref.ParentVersion)
				if err != nil {
					return nil, err
				}
				if parentVersionEnt == nil {
					return nil, &exception.CustomError{
						Status:  http.StatusBadRequest,
						Code:    exception.ReferencedPackageVersionNotFound,
						Message: exception.ReferencedPackageVersionNotFoundMsg,
						Params:  map[string]interface{}{"package": ref.ParentRefId, "version": ref.ParentVersion},
					}
				}
				//add revision to version name
				ref.ParentVersion = view.MakeVersionRefKey(parentVersionEnt.Version, parentVersionEnt.Revision)
			}
			//add revision to version name
			ref.Version = view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision)
			validRefs[makeConfigRefUniqueKey(*ref)] = struct{}{}
			// Pull in the references of the referenced version itself.
			childRefs, err := r.publishedRepo.GetVersionRefsV3(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision)
			if err != nil {
				return nil, err
			}
			for _, childRef := range childRefs {
				configRef := view.BCRef{
					RefId:         childRef.RefPackageId,
					Version:       view.MakeVersionRefKey(childRef.RefVersion, childRef.RefRevision),
					ParentRefId:   childRef.ParentRefPackageId,
					ParentVersion: view.MakeVersionRefKey(childRef.ParentRefVersion, childRef.ParentRefRevision),
				}
				// A child with no recorded parent hangs off the current ref.
				if configRef.ParentRefId == "" {
					configRef.ParentRefId = ref.RefId
				}
				if configRef.ParentVersion == "" {
					configRef.ParentVersion = ref.Version
				}
				uniqueRefKey := makeConfigRefUniqueKey(configRef)
				if _, exists := uniqueRefs[uniqueRefKey]; exists {
					continue
				}
				uniqueRefs[uniqueRefKey] = struct{}{}
				refs = append(refs, configRef)
			}
		}
	}

	// Second pass: validate and pin the full (possibly expanded) list.
	uniqueRefs := make(map[string]struct{}, 0)
	uniquePackageRefs := make(map[string]struct{}, 0)
	for i := range refs {
		ref := &refs[i]
		versionEnt, err := r.publishedRepo.GetVersion(ref.RefId, ref.Version)
		if err != nil {
			return nil, err
		}
		if versionEnt == nil {
			return nil, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.ReferencedPackageVersionNotFound,
				Message: exception.ReferencedPackageVersionNotFoundMsg,
				Params:  map[string]interface{}{"package": ref.RefId, "version": ref.Version},
			}
		}
		if ref.ParentRefId != "" {
			parentVersionEnt, err := r.publishedRepo.GetVersion(ref.ParentRefId, ref.ParentVersion)
			if err != nil {
				return nil, err
			}
			if parentVersionEnt == nil {
				return nil, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.ReferencedPackageVersionNotFound,
					Message: exception.ReferencedPackageVersionNotFoundMsg,
					Params:  map[string]interface{}{"package": ref.ParentRefId, "version": ref.ParentVersion},
				}
			}
			ref.ParentVersion = view.MakeVersionRefKey(parentVersionEnt.Version, parentVersionEnt.Revision)
		}
		ref.Version = view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision)
		uniqueRefKey := makeConfigRefUniqueKey(*ref)
		// Exactly identical references are always an error.
		if _, exists := uniqueRefs[uniqueRefKey]; exists {
			return nil, &exception.CustomError{
				Status:  http.StatusBadRequest,
				Code:    exception.DuplicateReference,
				Message: exception.DuplicateReferenceMsg,
				Params:  map[string]interface{}{"refId": ref.RefId, "refVersion": ref.Version},
			}
		}
		uniqueRefs[uniqueRefKey] = struct{}{}
		// A second non-excluded reference to the same package is either
		// auto-excluded (resolveConflicts) or rejected.
		if _, packageRefExists := uniquePackageRefs[ref.RefId]; packageRefExists && !ref.Excluded {
			if resolveConflicts {
				ref.Excluded = true
			} else {
				return nil, &exception.CustomError{
					Status:  http.StatusBadRequest,
					Code:    exception.MultiplePackageReference,
					Message: exception.MultiplePackageReferenceMsg,
					Params:  map[string]interface{}{"refId": ref.RefId},
				}
			}
		}
		uniquePackageRefs[ref.RefId] = struct{}{}
	}
	return refs, nil
}
+ } + } + if ref.ParentRefId != "" { + parentVersionEnt, err := r.publishedRepo.GetVersion(ref.ParentRefId, ref.ParentVersion) + if err != nil { + return nil, err + } + if parentVersionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ReferencedPackageVersionNotFound, + Message: exception.ReferencedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"package": ref.ParentRefId, "version": ref.ParentVersion}, + } + } + ref.ParentVersion = view.MakeVersionRefKey(parentVersionEnt.Version, parentVersionEnt.Revision) + } + ref.Version = view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision) + uniqueRefKey := makeConfigRefUniqueKey(*ref) + if _, exists := uniqueRefs[uniqueRefKey]; exists { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.DuplicateReference, + Message: exception.DuplicateReferenceMsg, + Params: map[string]interface{}{"refId": ref.RefId, "refVersion": ref.Version}, + } + } + uniqueRefs[uniqueRefKey] = struct{}{} + if _, packageRefExists := uniquePackageRefs[ref.RefId]; packageRefExists && !ref.Excluded { + if resolveConflicts { + ref.Excluded = true + } else { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.MultiplePackageReference, + Message: exception.MultiplePackageReferenceMsg, + Params: map[string]interface{}{"refId": ref.RefId}, + } + } + } + uniquePackageRefs[ref.RefId] = struct{}{} + } + return refs, nil +} + +func makeConfigRefUniqueKey(ref view.BCRef) string { + return fmt.Sprintf(`%v|@@|%v|@@|%v|@@|%v`, ref.RefId, ref.Version, ref.ParentRefId, ref.ParentVersion) +} diff --git a/qubership-apihub-service/service/RoleService.go b/qubership-apihub-service/service/RoleService.go new file mode 100644 index 0000000..64e7f28 --- /dev/null +++ b/qubership-apihub-service/service/RoleService.go @@ -0,0 +1,1074 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache 
// RoleService manages package membership, package/system roles and the
// permissions derived from them.
type RoleService interface {
	// Package membership management.
	AddPackageMembers(ctx context.SecurityContext, packageId string, emails []string, roleIds []string) (*view.PackageMembers, error)
	DeletePackageMember(ctx context.SecurityContext, packageId string, userId string) (*view.PackageMember, error)
	UpdatePackageMember(ctx context.SecurityContext, packageId string, userId string, roleId string, action string) error
	GetPackageMembers(packageId string) (*view.PackageMembers, error)
	// Permission queries for the calling user.
	GetPermissionsForPackage(ctx context.SecurityContext, packageId string) ([]string, error)
	GetUserPackagePromoteStatuses(packageIds []string, userId string) (*view.AvailablePackagePromoteStatuses, error)
	GetAvailableVersionPublishStatuses(ctx context.SecurityContext, packageId string) ([]string, error)
	HasRequiredPermissions(ctx context.SecurityContext, packageId string, requiredPermissions ...view.RolePermission) (bool, error)
	HasManageVersionPermission(ctx context.SecurityContext, packageId string, versionStatuses ...string) (bool, error)
	ValidateDefaultRole(ctx context.SecurityContext, packageId string, roleId string) error
	// Package role administration.
	PackageRoleExists(roleId string) (bool, error)
	CreateRole(role string, permissions []string) (*view.PackageRole, error)
	DeleteRole(roleId string) error
	GetAvailablePackageRoles(ctx context.SecurityContext, packageId string, excludeNone bool) (*view.PackageRoles, error)
	GetExistingRolesExcludingNone() (*view.PackageRoles, error)
	GetExistingPermissions() (*view.Permissions, error)
	SetRolePermissions(roleId string, permissions []string) error
	SetRoleOrder(roles []string) error
	// System (instance-wide) role administration.
	GetUserSystemRole(userId string) (string, error)
	SetUserSystemRole(userId string, roleId string) error
	IsSysadm(ctx context.SecurityContext) bool
	GetSystemAdministrators() (*view.Admins, error)
	AddSystemAdministrator(userId string) (*view.Admins, error)
	DeleteSystemAdministrator(userId string) error
}
string, requiredPermissions ...view.RolePermission) (bool, error) + HasManageVersionPermission(ctx context.SecurityContext, packageId string, versionStatuses ...string) (bool, error) + ValidateDefaultRole(ctx context.SecurityContext, packageId string, roleId string) error + PackageRoleExists(roleId string) (bool, error) + CreateRole(role string, permissions []string) (*view.PackageRole, error) + DeleteRole(roleId string) error + GetAvailablePackageRoles(ctx context.SecurityContext, packageId string, excludeNone bool) (*view.PackageRoles, error) + GetExistingRolesExcludingNone() (*view.PackageRoles, error) + GetExistingPermissions() (*view.Permissions, error) + SetRolePermissions(roleId string, permissions []string) error + SetRoleOrder(roles []string) error + GetUserSystemRole(userId string) (string, error) + SetUserSystemRole(userId string, roleId string) error + IsSysadm(ctx context.SecurityContext) bool + GetSystemAdministrators() (*view.Admins, error) + AddSystemAdministrator(userId string) (*view.Admins, error) + DeleteSystemAdministrator(userId string) error +} + +func NewRoleService(roleRepository repository.RoleRepository, userService UserService, atService ActivityTrackingService, publishedRepo repository.PublishedRepository) RoleService { + return roleServiceImpl{roleRepository: roleRepository, userService: userService, atService: atService, publishedRepo: publishedRepo} +} + +type roleServiceImpl struct { + roleRepository repository.RoleRepository + userService UserService + atService ActivityTrackingService + publishedRepo repository.PublishedRepository +} + +func (r roleServiceImpl) AddPackageMembers(ctx context.SecurityContext, packageId string, emails []string, roleIds []string) (*view.PackageMembers, error) { + packageEnt, err := r.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: 
exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !r.IsSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + + err = r.validatePackageMemberRoles(ctx, packageId, roleIds) + if err != nil { + return nil, err + } + + usersEmailMap, err := r.userService.GetUsersEmailMap(emails) + if err != nil { + return nil, err + } + nonExistentEmails := make([]string, 0) + userIds := make([]string, 0) + for _, email := range emails { + user, exists := usersEmailMap[email] + if exists { + userIds = append(userIds, user.Id) + } else { + nonExistentEmails = append(nonExistentEmails, email) + } + } + + for _, nonExistentEmail := range nonExistentEmails { + ldapUsers, err := r.userService.SearchUsersInLdap(view.LdapSearchFilterReq{FilterToValue: map[string]string{view.Mail: nonExistentEmail}, Limit: 1}, true) + if err != nil { + return nil, err + } + if ldapUsers == nil { + continue + } + + if len(ldapUsers.Users) == 0 { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserByEmailNotFound, + Message: exception.UserByEmailNotFoundMsg, + Params: map[string]interface{}{"email": nonExistentEmail}, + } + } + user := ldapUsers.Users[0] + + err = r.userService.StoreUserAvatar(user.Id, user.Avatar) + if err != nil { + return nil, err + } + externalUser := view.User{ + Id: user.Id, + Name: user.Name, + Email: user.Email, + AvatarUrl: fmt.Sprintf("/api/v2/users/%s/profile/avatar", user.Id), + } + createdUser, err := r.userService.GetOrCreateUserForIntegration(externalUser, view.ExternalLdapIntegration) + if err != nil { + return nil, err + } + userIds = append(userIds, createdUser.Id) + } + + err = r.addRolesForPackageMembers(ctx, packageId, 
userIds, roleIds) + if err != nil { + return nil, err + } + + usersMap, err := r.userService.GetUsersIdMap(userIds) + if err != nil { + return nil, err + } + + for _, addedUsrId := range userIds { + dataMap := map[string]interface{}{} + dataMap["memberId"] = addedUsrId + dataMap["memberName"] = usersMap[addedUsrId].Name + var roleViews []view.EventRoleView + for _, roleId := range roleIds { + roleEnt, err := r.roleRepository.GetRole(roleId) + if err != nil { + return nil, err + } + roleViews = append(roleViews, view.EventRoleView{ + RoleId: roleId, + Role: roleEnt.Role, + }) + } + dataMap["roles"] = roleViews + r.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETGrantRole, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + } + + return r.GetPackageMembers(packageId) +} + +func (r roleServiceImpl) UpdatePackageMember(ctx context.SecurityContext, packageId string, userIdToUpdate string, roleId string, action string) error { + packageEnt, err := r.publishedRepo.GetPackage(packageId) + if err != nil { + return err + } + if packageEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !r.IsSysadm(ctx) { + return &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + err = r.validatePackageMemberRoles(ctx, packageId, []string{roleId}) + if err != nil { + return err + } + switch action { + case view.ActionAddRole: + err = r.addRoleForPackageMember(ctx, packageId, userIdToUpdate, roleId) + case view.ActionRemoveRole: + err = r.deleteRoleForPackageMember(ctx, packageId, userIdToUpdate, roleId) + default: + return 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnsupportedMemberUpdateAction, + Message: exception.UnsupportedMemberUpdateActionMsg, + Params: map[string]interface{}{"action": action}, + } + } + if err != nil { + return err + } + + user, err := r.userService.GetUserFromDB(userIdToUpdate) + if err != nil { + return err + } + dataMap := map[string]interface{}{} + dataMap["memberId"] = userIdToUpdate + dataMap["memberName"] = user.Name + dataMap["roleId"] = roleId + dataMap["action"] = action + r.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETUpdateRole, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + + return nil +} + +func (r roleServiceImpl) DeletePackageMember(ctx context.SecurityContext, packageId string, userId string) (*view.PackageMember, error) { + packageEnt, err := r.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if packageEnt.DefaultRole == view.NoneRoleId && packageEnt.ParentId == "" { + if !r.IsSysadm(ctx) { + return nil, &exception.CustomError{ + Status: http.StatusForbidden, + Code: exception.InsufficientPrivileges, + Message: exception.InsufficientPrivilegesMsg, + Debug: exception.PrivateWorkspaceNotModifiableMsg, + } + } + } + packageMember, err := r.roleRepository.GetDirectPackageMember(packageId, userId) + if err != nil { + return nil, err + } + if packageMember == nil { + user, err := r.userService.GetUserFromDB(userId) + if err != nil { + return nil, err + } + if user == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + return nil, 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UserWithNoRoles, + Message: exception.UserWithNoRolesMsg, + Params: map[string]interface{}{"user": user.Name, "packageId": packageId}, + } + } + + err = r.validatePackageMemberRoles(ctx, packageId, packageMember.Roles) + if err != nil { + return nil, err + } + + err = r.roleRepository.DeleteDirectPackageMember(packageId, userId) + if err != nil { + return nil, err + } + + user, err := r.userService.GetUserFromDB(userId) + if err != nil { + return nil, err + } + + dataMap := map[string]interface{}{} + dataMap["memberId"] = userId + dataMap["memberName"] = user.Name + var roleViews []view.EventRoleView + for _, roleId := range packageMember.Roles { + roleEnt, err := r.roleRepository.GetRole(roleId) + if err != nil { + return nil, err + } + roleViews = append(roleViews, view.EventRoleView{ + RoleId: roleId, + Role: roleEnt.Role, + }) + } + dataMap["roles"] = roleViews + + r.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETDeleteRole, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + + effectiveMemberRoles, err := r.roleRepository.GetPackageRolesHierarchyForUser(packageId, userId) + if err != nil { + return nil, err + } + if len(effectiveMemberRoles) != 0 { + packageMemverView := entity.MakePackageMemberView(packageId, effectiveMemberRoles) + return &packageMemverView, nil + } + + return nil, nil +} + +func (r roleServiceImpl) deleteRoleForPackageMember(ctx context.SecurityContext, packageId string, userId string, roleId string) error { + packageMember, err := r.roleRepository.GetDirectPackageMember(packageId, userId) + if err != nil { + return err + } + if packageMember == nil || !utils.SliceContains(packageMember.Roles, roleId) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.MemberRoleNotFound, + Message: exception.MemberRoleNotFoundMsg, + Params: map[string]interface{}{"userId": userId, 
"packageId": packageId, "roleId": roleId}, + } + } + return r.roleRepository.RemoveRoleFromPackageMember(packageId, userId, roleId) +} + +func (r roleServiceImpl) addRoleForPackageMember(ctx context.SecurityContext, packageId string, userId string, roleId string) error { + return r.addRolesForPackageMembers(ctx, packageId, []string{userId}, []string{roleId}) +} + +func (r roleServiceImpl) addRolesForPackageMembers(ctx context.SecurityContext, packageId string, userIds []string, roleIds []string) error { + usersMap, err := r.userService.GetUsersIdMap(userIds) + if err != nil { + return err + } + if len(usersMap) != len(userIds) { + incorrectUserIds := make([]string, 0) + for _, userId := range userIds { + if _, exists := usersMap[userId]; !exists { + incorrectUserIds = append(incorrectUserIds, userId) + } + } + if len(incorrectUserIds) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UsersNotFound, + Message: exception.UsersNotFoundMsg, + Params: map[string]interface{}{"users": strings.Join(incorrectUserIds, ", ")}, + } + } + } + packageMembers, err := r.getEffectivePackageMembersMap(packageId) + if err != nil { + return err + } + packageDirectMembers, err := r.getDirectPackageMembersMap(packageId) + if err != nil { + return err + } + directMemberEntites := make([]entity.PackageMemberRoleEntity, 0) + timeNow := time.Now() + for _, userId := range userIds { + rolesToSet := make([]string, 0) + + if packageMemberRoles, exists := packageMembers[userId]; exists { + for _, roleId := range roleIds { + if !roleExists(packageMemberRoles, roleId) { + rolesToSet = append(rolesToSet, roleId) + } + } + } else { + rolesToSet = roleIds + } + + if len(rolesToSet) == 0 { + continue + } + + directMember, exists := packageDirectMembers[userId] + if !exists { + directMemberEntites = append(directMemberEntites, entity.PackageMemberRoleEntity{ + PackageId: packageId, + UserId: userId, + Roles: rolesToSet, + CreatedAt: timeNow, + CreatedBy: 
ctx.GetUserId(), + }) + continue + } + directMember.Roles = rolesToSet + directMember.UpdatedAt = &timeNow + directMember.UpdatedBy = ctx.GetUserId() + directMemberEntites = append(directMemberEntites, directMember) + } + err = r.roleRepository.AddPackageMemberRoles(directMemberEntites) + if err != nil { + return err + } + return nil +} + +func roleExists(roles []entity.PackageMemberRoleRichEntity, roleId string) bool { + for _, memberRoleEntity := range roles { + if memberRoleEntity.RoleId == roleId { + return true + } + } + return false +} + +func (r roleServiceImpl) GetPackageMembers(packageId string) (*view.PackageMembers, error) { + packageEnt, err := r.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + packageMembers, err := r.getEffectivePackageMembersMap(packageId) + if err != nil { + return nil, err + } + packageMembersView := make([]view.PackageMember, 0) + for _, packageMember := range packageMembers { + memberView := entity.MakePackageMemberView(packageId, packageMember) + packageMembersView = append(packageMembersView, memberView) + } + sort.Slice(packageMembersView, func(i, j int) bool { + return packageMembersView[i].User.Name < packageMembersView[j].User.Name + }) + return &view.PackageMembers{Members: packageMembersView}, nil +} + +func (r roleServiceImpl) getEffectivePackageMembersMap(packageId string) (map[string][]entity.PackageMemberRoleRichEntity, error) { + packageMembers, err := r.roleRepository.GetPackageHierarchyMembers(packageId) + if err != nil { + return nil, err + } + membersMap := make(map[string][]entity.PackageMemberRoleRichEntity, 0) + for _, memberEntity := range packageMembers { + if memberRoles, exists := membersMap[memberEntity.UserId]; exists { + 
membersMap[memberEntity.UserId] = append(memberRoles, memberEntity) + } else { + membersMap[memberEntity.UserId] = []entity.PackageMemberRoleRichEntity{memberEntity} + } + } + return membersMap, nil +} + +func (r roleServiceImpl) getDirectPackageMembersMap(packageId string) (map[string]entity.PackageMemberRoleEntity, error) { + packageMembers, err := r.roleRepository.GetDirectPackageMembers(packageId) + if err != nil { + return nil, err + } + packageMembersMap := make(map[string]entity.PackageMemberRoleEntity, 0) + for _, member := range packageMembers { + packageMembersMap[member.UserId] = member + } + return packageMembersMap, nil +} + +// for agent +func (r roleServiceImpl) GetUserPackagePromoteStatuses(packageIds []string, userId string) (*view.AvailablePackagePromoteStatuses, error) { + userSystemRole, err := r.GetUserSystemRole(userId) + if err != nil { + return nil, err + } + sysadmUser := userSystemRole == view.SysadmRole + + result := make(view.AvailablePackagePromoteStatuses, 0) + for _, packageId := range packageIds { + if sysadmUser { + result[packageId] = []string{ + string(view.Draft), + string(view.Release), + string(view.Archived), + } + continue + } + userPermissions, err := r.getUserPermissionsForPackage(packageId, userId) + if err != nil { + return nil, err + } + result[packageId] = getAvailablePublishStatuses(userPermissions) + } + return &result, nil +} + +func getAvailablePublishStatuses(userPermissions []string) []string { + availablePublishStatuses := make([]string, 0) + if utils.SliceContains(userPermissions, string(view.ManageDraftVersionPermission)) { + availablePublishStatuses = append(availablePublishStatuses, string(view.Draft)) + } + if utils.SliceContains(userPermissions, string(view.ManageReleaseVersionPermission)) { + availablePublishStatuses = append(availablePublishStatuses, string(view.Release)) + } + if utils.SliceContains(userPermissions, string(view.ManageArchivedVersionPermission)) { + availablePublishStatuses = 
append(availablePublishStatuses, string(view.Archived)) + } + return availablePublishStatuses +} + +func (r roleServiceImpl) GetAvailableVersionPublishStatuses(ctx context.SecurityContext, packageId string) ([]string, error) { + userPackagePermissions, err := r.GetPermissionsForPackage(ctx, packageId) + if err != nil { + return nil, err + } + return getAvailablePublishStatuses(userPackagePermissions), nil +} + +func (r roleServiceImpl) GetPermissionsForPackage(ctx context.SecurityContext, packageId string) ([]string, error) { + if r.IsSysadm(ctx) { + allPermissions := make([]string, 0) + for _, permission := range view.GetAllRolePermissions() { + allPermissions = append(allPermissions, permission.Id()) + } + return allPermissions, nil + } + if apikeyPackageId := ctx.GetApikeyPackageId(); apikeyPackageId != "" { + apikeyRoles := ctx.GetApikeyRoles() + if apikeyPackageId != packageId && !strings.HasPrefix(packageId, apikeyPackageId+".") && apikeyPackageId != "*" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + apikeyPermissions, err := r.roleRepository.GetPermissionsForRoles(apikeyRoles) + if err != nil { + return nil, err + } + return apikeyPermissions, nil + } + return r.getUserPermissionsForPackage(packageId, ctx.GetUserId()) +} + +func (r roleServiceImpl) getUserPermissionsForPackage(packageId string, userId string) ([]string, error) { + userPermissions, err := r.roleRepository.GetUserPermissions(packageId, userId) + if err != nil { + return nil, err + } + return userPermissions, nil +} + +func (r roleServiceImpl) HasRequiredPermissions(ctx context.SecurityContext, packageId string, requiredPermissions ...view.RolePermission) (bool, error) { + if r.IsSysadm(ctx) { + return true, nil + } + + if apikeyPackageId := ctx.GetApikeyPackageId(); apikeyPackageId != "" { + apikeyRoles := ctx.GetApikeyRoles() + 
if apikeyPackageId != packageId && !strings.HasPrefix(packageId, apikeyPackageId+".") && apikeyPackageId != "*" { + return false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + apikeyPermissions, err := r.roleRepository.GetPermissionsForRoles(apikeyRoles) + if err != nil { + return false, err + } + for _, requiredPermission := range requiredPermissions { + if !utils.SliceContains(apikeyPermissions, string(requiredPermission)) { + return false, nil + } + } + return true, nil + } + + userPermissions, err := r.getUserPermissionsForPackage(packageId, ctx.GetUserId()) + if err != nil { + return false, err + } + if !utils.SliceContains(userPermissions, string(view.ReadPermission)) { + return false, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + for _, requiredPermission := range requiredPermissions { + if !utils.SliceContains(userPermissions, string(requiredPermission)) { + return false, nil + } + } + return true, nil +} + +func (r roleServiceImpl) HasManageVersionPermission(ctx context.SecurityContext, packageId string, versionStatuses ...string) (bool, error) { + if r.IsSysadm(ctx) { + return true, nil + } + requiredPermissions := make([]view.RolePermission, 0) + for _, status := range versionStatuses { + requiredPermissions = append(requiredPermissions, getRequiredPermissionForVersionStatus(status)) + } + hasRequiredPermission, err := r.HasRequiredPermissions(ctx, packageId, requiredPermissions...) 
+ if err != nil { + return false, nil + } + if hasRequiredPermission { + return true, nil + } + + return false, nil +} + +func getRequiredPermissionForVersionStatus(versionStatus string) view.RolePermission { + switch versionStatus { + case string(view.Draft): + return view.ManageDraftVersionPermission + case string(view.Release): + return view.ManageReleaseVersionPermission + case string(view.Archived): + return view.ManageArchivedVersionPermission + default: + return "" + } +} + +// todo move this method to utils or context package? +func (r roleServiceImpl) IsSysadm(ctx context.SecurityContext) bool { + apikeyRoles := ctx.GetApikeyRoles() + if utils.SliceContains(apikeyRoles, view.SysadmRole) { + return true + } + return ctx.GetUserSystemRole() == view.SysadmRole +} + +func (r roleServiceImpl) ValidateDefaultRole(ctx context.SecurityContext, packageId string, roleId string) error { + return r.validatePackageMemberRoles(ctx, packageId, []string{roleId}) +} + +func (r roleServiceImpl) validatePackageMemberRoles(ctx context.SecurityContext, packageId string, roleIds []string) error { + availableRoles, err := r.GetAvailablePackageRoles(ctx, packageId, false) + if err != nil { + return err + } + availableRolesMap := make(map[string]bool, 0) + for _, role := range availableRoles.Roles { + availableRolesMap[role.RoleId] = true + } + for _, roleId := range roleIds { + if exists := availableRolesMap[roleId]; !exists { + roleEnt, err := r.roleRepository.GetRole(roleId) + if err != nil { + return err + } + if roleEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleDoesntExist, + Message: exception.RoleDoesntExistMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.NotEnoughPermissionsForRole, + Message: exception.NotEnoughPermissionsForRoleMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + } + + return nil +} + 
+func (r roleServiceImpl) PackageRoleExists(roleId string) (bool, error) { + role, err := r.roleRepository.GetRole(roleId) + if err != nil { + return false, err + } + if role == nil { + return false, nil + } + return true, nil +} + +func (r roleServiceImpl) CreateRole(role string, permissions []string) (*view.PackageRole, error) { + err := validateRolePermissionsEnum(permissions) + if err != nil { + return nil, err + } + err = validateRole(role) + if err != nil { + return nil, err + } + allRoles, err := r.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + newRoleId := slug.Make(role) + viewerRoleRank := 1 + for _, role := range allRoles { + if role.Id == string(view.ViewerRoleId) { + viewerRoleRank = role.Rank + } + if role.Id == newRoleId { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleAlreadyExists, + Message: exception.RoleAlreadyExistsMsg, + Params: map[string]interface{}{"roleId": newRoleId}, + } + } + } + if !utils.SliceContains(permissions, string(view.ReadPermission)) { + permissions = append(permissions, string(view.ReadPermission)) + } + newRoleEntity := entity.RoleEntity{ + Id: newRoleId, + Role: role, + Permissions: permissions, + Rank: viewerRoleRank + 1, + ReadOnly: false, + } + err = r.roleRepository.CreateRole(newRoleEntity) + if err != nil { + return nil, err + } + roleView := entity.MakeRoleView(newRoleEntity) + return &roleView, nil +} + +func (r roleServiceImpl) DeleteRole(roleId string) error { + role, err := r.roleRepository.GetRole(roleId) + if err != nil { + return err + } + if role == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.RoleDoesntExist, + Message: exception.RoleDoesntExistMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + if role.ReadOnly { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotEditable, + Message: exception.RoleNotEditableMsg, + Params: 
map[string]interface{}{"roleId": roleId}, + } + } + return r.roleRepository.DeleteRole(roleId) +} + +func (r roleServiceImpl) GetAvailablePackageRoles(ctx context.SecurityContext, packageId string, excludeNone bool) (*view.PackageRoles, error) { + packageEnt, err := r.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + userId := ctx.GetUserId() + var availableRoles []entity.RoleEntity + allRoles, err := r.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + if r.IsSysadm(ctx) { + availableRoles = allRoles + } else if ctx.GetApikeyPackageId() == packageId || strings.HasPrefix(packageId, ctx.GetApikeyPackageId()+".") || ctx.GetApikeyPackageId() == "*" { + maxRoleRank := -1 + for _, apikeyRoleId := range ctx.GetApikeyRoles() { + for _, role := range allRoles { + if apikeyRoleId == role.Id { + if maxRoleRank < role.Rank { + maxRoleRank = role.Rank + } + } + } + } + for _, role := range allRoles { + if maxRoleRank >= role.Rank { + availableRoles = append(availableRoles, role) + } + } + } else { + availableRoles, err = r.roleRepository.GetAvailablePackageRoles(packageId, userId) + if err != nil { + return nil, err + } + } + result := make([]view.PackageRole, 0) + for _, roleEnt := range availableRoles { + if excludeNone && roleEnt.Id == view.NoneRoleId { + continue + } + result = append(result, entity.MakeRoleView(roleEnt)) + } + return &view.PackageRoles{Roles: result}, nil +} + +func (r roleServiceImpl) GetExistingRolesExcludingNone() (*view.PackageRoles, error) { + existingRoles := make([]view.PackageRole, 0) + allRoles, err := r.roleRepository.GetAllRoles() + if err != nil { + return nil, err + } + for _, role := range allRoles { + if role.Id == view.NoneRoleId { + continue + } + 
existingRoles = append(existingRoles, entity.MakeRoleView(role)) + } + return &view.PackageRoles{Roles: existingRoles}, nil +} + +func (r roleServiceImpl) GetExistingPermissions() (*view.Permissions, error) { + existingPermissions := make([]view.Permission, 0) + + for _, permission := range view.GetAllRolePermissions() { + existingPermissions = append(existingPermissions, + view.Permission{ + PermissionId: permission.Id(), + Name: permission.Name(), + }) + } + return &view.Permissions{Permissions: existingPermissions}, nil +} + +func (r roleServiceImpl) SetRolePermissions(roleId string, permissions []string) error { + err := validateRolePermissionsEnum(permissions) + if err != nil { + return err + } + role, err := r.roleRepository.GetRole(roleId) + if err != nil { + return err + } + if role == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.RoleDoesntExist, + Message: exception.RoleDoesntExistMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + if role.ReadOnly { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotEditable, + Message: exception.RoleNotEditableMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + if !utils.SliceContains(permissions, string(view.ReadPermission)) { + permissions = append(permissions, string(view.ReadPermission)) + } + return r.roleRepository.UpdateRolePermissions(roleId, permissions) +} + +func (r roleServiceImpl) SetRoleOrder(roles []string) error { + roleEntities, err := r.roleRepository.GetAllRoles() + if err != nil { + return err + } + if len(roles) != len(roleEntities) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AllRolesRequired, + Message: exception.AllRolesRequiredMsg, + } + } + roleMap := make(map[string]entity.RoleEntity, 0) + for _, roleEntity := range roleEntities { + roleMap[roleEntity.Id] = roleEntity + if !utils.SliceContains(roles, roleEntity.Id) { + return 
&exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.AllRolesRequired, + Message: exception.AllRolesRequiredMsg, + } + } + } + if roles[0] != view.AdminRoleId { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotEditable, + Message: exception.RoleNotEditableMsg, + Params: map[string]interface{}{"roleId": view.AdminRoleId}, + } + } + rolesToUpdate := make([]entity.RoleEntity, 0) + rank := len(roles) - 1 + for index, roleId := range roles { + role := roleMap[roleId] + if role.ReadOnly { + if roleId != string(view.AdminRoleId) && role.Rank != rank-index { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNotEditable, + Message: exception.RoleNotEditableMsg, + Params: map[string]interface{}{"roleId": roleId}, + } + } + continue + } + rolesToUpdate = append(rolesToUpdate, entity.RoleEntity{Id: roleId, Rank: rank - index}) + } + err = r.roleRepository.SetRoleRanks(rolesToUpdate) + if err != nil { + return err + } + return nil +} + +func validateRolePermissionsEnum(permissions []string) error { + for _, permission := range permissions { + _, err := view.ParseRolePermission(permission) + if err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidRolePermission, + Message: exception.InvalidRolePermissionMsg, + Params: map[string]interface{}{"permission": permission}, + } + } + } + return nil +} + +func validateRole(role string) error { + roleNamePattern := `^[a-zA-Z0-9 -]+$` + roleNameRegexp := regexp.MustCompile(roleNamePattern) + if !roleNameRegexp.MatchString(role) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RoleNameDoesntMatchPattern, + Message: exception.RoleNameDoesntMatchPatternMsg, + Params: map[string]interface{}{"role": role, "pattern": roleNamePattern}, + } + } + return nil +} + +func (r roleServiceImpl) GetUserSystemRole(userId string) (string, error) { + systemRoleEnt, err 
:= r.roleRepository.GetUserSystemRole(userId) + if err != nil { + return "", err + } + if systemRoleEnt == nil { + return "", nil + } + return systemRoleEnt.Role, nil +} + +func (r roleServiceImpl) SetUserSystemRole(userId string, roleId string) error { + return r.roleRepository.SetUserSystemRole(userId, roleId) +} + +func (r roleServiceImpl) GetSystemAdministrators() (*view.Admins, error) { + userEnts, err := r.roleRepository.GetUsersBySystemRole(view.SysadmRole) + if err != nil { + return nil, err + } + users := make([]view.User, 0) + for _, ent := range userEnts { + users = append(users, *entity.MakeUserV2View(&ent)) + } + return &view.Admins{Admins: users}, nil +} + +func (r roleServiceImpl) AddSystemAdministrator(userId string) (*view.Admins, error) { + userEnt, err := r.userService.GetUserFromDB(userId) + if err != nil { + return nil, err + } + if userEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + err = r.SetUserSystemRole(userId, view.SysadmRole) + if err != nil { + return nil, err + } + return r.GetSystemAdministrators() +} + +func (r roleServiceImpl) DeleteSystemAdministrator(userId string) error { + userEnt, err := r.userService.GetUserFromDB(userId) + if err != nil { + return err + } + if userEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UserNotFound, + Message: exception.UserNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + userSystemRole, err := r.GetUserSystemRole(userId) + if err != nil { + return err + } + if userSystemRole != view.SysadmRole { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.SysadmNotFound, + Message: exception.SysadmNotFoundMsg, + Params: map[string]interface{}{"userId": userId}, + } + } + err = r.roleRepository.DeleteUserSystemRole(userId) + if err != nil { + 
return err + } + return nil +} diff --git a/qubership-apihub-service/service/SearchService.go b/qubership-apihub-service/service/SearchService.go new file mode 100644 index 0000000..e6464c1 --- /dev/null +++ b/qubership-apihub-service/service/SearchService.go @@ -0,0 +1,325 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + goctx "context" + "fmt" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type SearchService interface { + GetFilteredProjects(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool, onlyPublished bool, limit int, page int) (*view.Projects, error) + // check + GetFilteredPackages(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool, onlyPublished bool) (*view.Packages_deprecated, error) + // check + GetPackagesByServiceName(ctx context.SecurityContext, serviceName string) (*view.Packages_deprecated, error) + GetProjectBranches(ctx context.SecurityContext, projectId string, filter string) (*view.BranchListView, error) + GetContentHistory(ctx context.SecurityContext, projectId string, branchName string, 
fileId string, limit int, page int) (*view.Changes, error) + GetContentFromCommit(ctx context.SecurityContext, projectId string, branchName string, fileId string, commitId string) ([]byte, error) + GetContentFromBlobId(ctx context.SecurityContext, projectId string, blobId string) ([]byte, error) + GetBranchHistory_deprecated(ctx context.SecurityContext, projectId string, branchName string, limit int, page int) (*view.Changes, error) + GetPackage(ctx context.SecurityContext, id string) (*view.Package, error) //todo remove this method +} + +func NewSearchService(projectService ProjectService, + versionService PublishedService, + branchService BranchService, + gitClientProvider GitClientProvider, + draftContentService DraftContentService) SearchService { + return &searchServiceImpl{ + projectService: projectService, + versionService: versionService, + branchService: branchService, + gitClientProvider: gitClientProvider, + draftContentService: draftContentService} +} + +type searchServiceImpl struct { + projectService ProjectService + versionService PublishedService + branchService BranchService + gitClientProvider GitClientProvider + draftContentService DraftContentService +} + +func (s searchServiceImpl) GetFilteredProjects(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool, onlyPublished bool, limit int, page int) (*view.Projects, error) { + projects, err := s.projectService.GetFilteredProjects(ctx, filter, groupId, onlyFavorite) + if err != nil { + return nil, err + } + filteredProjects := make([]view.Project, 0) + if !onlyPublished { + filteredProjects = projects + } else { + for _, project := range projects { + published, err := s.versionService.PackagePublished(project.PackageId) + if err != nil { + return nil, err + } + if published { + filteredProjects = append(filteredProjects, project) + } + } + } + startIndex, endIndex := utils.PaginateList(len(filteredProjects), limit, page) + pagedChanges := filteredProjects[startIndex:endIndex] 
+ return &view.Projects{Projects: pagedChanges}, nil +} + +func (s searchServiceImpl) GetFilteredPackages(ctx context.SecurityContext, filter string, groupId string, onlyFavorite bool, onlyPublished bool) (*view.Packages_deprecated, error) { + packages, err := s.versionService.GetFilteredPackages(ctx, filter, groupId, onlyFavorite) + if err != nil { + return nil, err + } + filteredPackages := make([]view.Package, 0) + if !onlyPublished { + filteredPackages = packages + } else { + for _, pkg := range packages { + published, err := s.versionService.PackagePublished(pkg.Id) + if err != nil { + return nil, err + } + if published { + filteredPackages = append(filteredPackages, pkg) + } + } + } + return &view.Packages_deprecated{Packages: filteredPackages}, nil +} + +func (s searchServiceImpl) GetPackagesByServiceName(ctx context.SecurityContext, serviceName string) (*view.Packages_deprecated, error) { + packages, err := s.versionService.GetPackagesByServiceName(ctx, serviceName) + if err != nil { + return nil, err + } + return &view.Packages_deprecated{Packages: packages}, nil +} + +func (s searchServiceImpl) GetProjectBranches(ctx context.SecurityContext, projectId string, filter string) (*view.BranchListView, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetProjectBranches(%s,%s)", projectId, filter)) + + project, err := s.projectService.GetProject(ctx, projectId) + if err != nil { + return nil, err + } + limit := 20 + + branches, err := s.branchService.GetProjectBranchesFromGit(goCtx, projectId, filter, limit) + if err != nil { + return nil, err + } + if len(branches) == 0 { + return &view.BranchListView{Branches: []view.BranchItemView{}}, nil + } + if project.PackageId == "" { + return &view.BranchListView{Branches: branches}, nil + } + + versionsObj, err := s.versionService.GetPackageVersions(project.PackageId) + if err != 
nil { + return nil, err + } + versions := versionsObj.Versions + var result []view.BranchItemView + + for _, branch := range branches { + for _, version := range versions { + if version.BranchName == branch.Name { + tempPublishedAt := version.PublishedAt + branch.PublishedAt = &tempPublishedAt + branch.Status = version.Status + branch.Version = version.Version + break + } + } + result = append(result, branch) + } + return &view.BranchListView{Branches: result}, nil +} + +func (s searchServiceImpl) GetContentHistory(ctx context.SecurityContext, projectId string, branchName string, fileId string, limit int, page int) (*view.Changes, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetContentHistory(%s,%s,%s,%d,%d)", projectId, branchName, fileId, limit, page)) + + project, err := s.projectService.GetProject(ctx, projectId) + if err != nil { + return nil, err + } + gitClient, err := s.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + gitCommits, err := gitClient.GetCommitsList(goCtx, project.Integration.RepositoryId, branchName, fileId) + if err != nil { + return nil, err + } + if len(gitCommits) == 0 { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.FileNotFound, + Message: exception.FileNotFoundMsg, + Params: map[string]interface{}{"fileId": fileId, "branch": branchName, "projectGitId": projectId}, + } + } + + changes, err := s.getChangesFromCommits(gitCommits) + if err != nil { + return nil, err + } + startIndex, endIndex := utils.PaginateList(len(changes), limit, page) + pagedChanges := changes[startIndex:endIndex] + return &view.Changes{Changes: pagedChanges}, nil +} + +func (s searchServiceImpl) GetBranchHistory_deprecated(ctx context.SecurityContext, projectId string, 
branchName string, limit int, page int) (*view.Changes, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetBranchHistory(%s,%s,%d,%d)", projectId, branchName, limit, page)) + + project, err := s.projectService.GetProject(ctx, projectId) + if err != nil { + return nil, err + } + branch, err := s.branchService.GetBranchDetails(goCtx, projectId, branchName) + if err != nil { + return nil, err + } + branch.RemoveFolders() + gitClient, err := s.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + changes := []view.FileChange{} + branchCommits := map[string]bool{} + for _, content := range branch.Files { + gitCommits, err := gitClient.GetCommitsList(goCtx, project.Integration.RepositoryId, branchName, content.FileId) + if err != nil { + continue + } + commitsChanges, err := s.getChangesFromCommits(gitCommits) + if err != nil { + return nil, err + } + //filter duplicates + for _, change := range commitsChanges { + if _, exists := branchCommits[change.CommitId]; !exists { + branchCommits[change.CommitId] = true + changes = append(changes, change) + } + } + } + startIndex, endIndex := utils.PaginateList(len(changes), limit, page) + pagedChanges := changes[startIndex:endIndex] + return &view.Changes{Changes: pagedChanges}, nil +} + +func (s searchServiceImpl) GetContentFromCommit(ctx context.SecurityContext, projectId string, branchName string, fileId string, commitId string) ([]byte, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetContentFromCommit(%s,%s,%s,%s)", projectId, branchName, fileId, commitId)) + + project, err := s.projectService.GetProject(ctx, projectId) + if err != nil { + 
return nil, err + } + gitClient, err := s.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + var content []byte + var gitRef string + if commitId == "latest" { + // get file from latest commit in branch + gitRef = branchName + } else { + // get file from exact commit + gitRef = commitId + } + content, _, _, err = gitClient.GetFileContent(goCtx, project.Integration.RepositoryId, gitRef, fileId) + if err != nil { + return nil, err + } + if content == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.FileByRefNotFound, + Message: exception.FileByRefNotFoundMsg, + Params: map[string]interface{}{"fileId": fileId, "ref": gitRef, "projectGitId": projectId}, + } + } + return content, nil +} + +func (s searchServiceImpl) GetContentFromBlobId(ctx context.SecurityContext, projectId string, blobId string) ([]byte, error) { + goCtx := context.CreateContextWithSecurity(goctx.Background(), ctx) // TODO: should be context from the request + goCtx = context.CreateContextWithStacktrace(goCtx, fmt.Sprintf("GetContentFromBlobId(%s,%s)", projectId, blobId)) + + project, err := s.projectService.GetProject(ctx, projectId) + if err != nil { + return nil, err + } + gitClient, err := s.gitClientProvider.GetUserClient(project.Integration.Type, ctx.GetUserId()) + if err != nil { + return nil, fmt.Errorf("failed to get git client: %v", err) + } + + var content []byte + content, _, err = gitClient.GetFileContentByBlobId(goCtx, project.Integration.RepositoryId, blobId) + if err != nil { + return nil, err + } + if content == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.FileByBlobIdNotFound, + Message: exception.FileByBlobIdNotFoundMsg, + Params: map[string]interface{}{"blobId": blobId, "projectGitId": project.Integration.RepositoryId}, + } + } + return content, nil +} + +func (s searchServiceImpl) 
getChangesFromCommits(gitCommits []view.GitCommit) ([]view.FileChange, error) { + changes := []view.FileChange{} + for _, commit := range gitCommits { + changes = append(changes, view.FileChange{ + CommitId: commit.Id, + ModifiedBy: view.User{ + //TODO maybe can be extended by getting AvatarUrl from integration + Name: commit.CommitterName, + Email: commit.CommitterEmail, + }, + ModifiedAt: commit.CommittedDate, + Comment: commit.Message}) + } + return changes, nil +} + +// todo remove this method +func (s searchServiceImpl) GetPackage(ctx context.SecurityContext, id string) (*view.Package, error) { + pkg, err := s.versionService.GetPackageById(ctx, id) + if err != nil { + return nil, err + } + return pkg, nil +} diff --git a/qubership-apihub-service/service/SystemInfoService.go b/qubership-apihub-service/service/SystemInfoService.go new file mode 100644 index 0000000..8d3146b --- /dev/null +++ b/qubership-apihub-service/service/SystemInfoService.go @@ -0,0 +1,770 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "encoding/base64" + "fmt" + "os" + "strconv" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" +) + +const ( + JWT_PRIVATE_KEY = "JWT_PRIVATE_KEY" + ARTIFACT_DESCRIPTOR_VERSION = "ARTIFACT_DESCRIPTOR_VERSION" + BASE_PATH = "BASE_PATH" + PRODUCTION_MODE = "PRODUCTION_MODE" + LOG_LEVEL = "LOG_LEVEL" + GITLAB_URL = "GITLAB_URL" + DIFF_SERVICE_URL = "DIFF_SERVICE_URL" + LISTEN_ADDRESS = "LISTEN_ADDRESS" + ORIGIN_ALLOWED = "ORIGIN_ALLOWED" + APIHUB_POSTGRESQL_HOST = "APIHUB_POSTGRESQL_HOST" + APIHUB_POSTGRESQL_PORT = "APIHUB_POSTGRESQL_PORT" + APIHUB_POSTGRESQL_DB_NAME = "APIHUB_POSTGRESQL_DB_NAME" + APIHUB_POSTGRESQL_USERNAME = "APIHUB_POSTGRESQL_USERNAME" + APIHUB_POSTGRESQL_PASSWORD = "APIHUB_POSTGRESQL_PASSWORD" + PG_SSL_MODE = "PG_SSL_MODE" + CLIENT_ID = "CLIENT_ID" + CLIENT_SECRET = "CLIENT_SECRET" + APIHUB_URL = "APIHUB_URL" + PUBLISH_ARCHIVE_SIZE_LIMIT_MB = "PUBLISH_ARCHIVE_SIZE_LIMIT_MB" + PUBLISH_FILE_SIZE_LIMIT_MB = "PUBLISH_FILE_SIZE_LIMIT_MB" + BRANCH_CONTENT_SIZE_LIMIT_MB = "BRANCH_CONTENT_SIZE_LIMIT_MB" + RELEASE_VERSION_PATTERN = "RELEASE_VERSION_PATTERN" + SAML_CRT = "SAML_CRT" + SAML_KEY = "SAML_KEY" + ADFS_METADATA_URL = "ADFS_METADATA_URL" + LDAP_USER = "LDAP_USER" + LDAP_USER_PASSWORD = "LDAP_USER_PASSWORD" + LDAP_SERVER = "LDAP_SERVER" + LDAP_BASE_DN = "LDAP_BASE_DN" + LDAP_ORGANIZATION_UNIT = "LDAP_ORGANIZATION_UNIT" + LDAP_SEARCH_BASE = "LDAP_SEARCH_BASE" + SYSTEM_NOTIFICATION = "SYSTEM_NOTIFICATION" //TODO: replace with db impl + BUILDS_CLEANUP_SCHEDULE = "BUILDS_CLEANUP_SCHEDULE" + INSECURE_PROXY = "INSECURE_PROXY" + METRICS_GETTER_SCHEDULE = "METRICS_GETTER_SCHEDULE" + MONITORING_ENABLED = "MONITORING_ENABLED" + STORAGE_SERVER_USERNAME = "STORAGE_SERVER_USERNAME" + STORAGE_SERVER_PASSWORD = "STORAGE_SERVER_PASSWORD" + STORAGE_SERVER_CRT = "STORAGE_SERVER_CRT" + STORAGE_SERVER_URL = "STORAGE_SERVER_URL" + STORAGE_SERVER_BUCKET_NAME = 
"STORAGE_SERVER_BUCKET_NAME" + STORAGE_SERVER_ACTIVE = "STORAGE_SERVER_ACTIVE" + STORAGE_SERVER_STORE_ONLY_BUILD_RESULT = "STORAGE_SERVER_STORE_ONLY_BUILD_RESULT" + EXTERNAL_LINKS = "EXTERNAL_LINKS" + DEFAULT_WORKSPACE_ID = "DEFAULT_WORKSPACE_ID" + CUSTOM_PATH_PREFIXES = "CUSTOM_PATH_PREFIXES" + ALLOWED_HOSTS = "ALLOWED_HOSTS" +) + +type SystemInfoService interface { + GetSystemInfo() *view.SystemInfo + Init() error + GetBasePath() string + GetJwtPrivateKey() []byte + IsProductionMode() bool + GetBackendVersion() string + GetLogLevel() string + GetGitlabUrl() string + GetDiffServiceUrl() string + GetListenAddress() string + GetOriginAllowed() string + GetPGHost() string + GetPGPort() int + GetPGDB() string + GetPGUser() string + GetPGPassword() string + GetPGSSLMode() string + GetClientID() string + GetClientSecret() string + GetAPIHubUrl() string + GetPublishArchiveSizeLimitMB() int64 + GetPublishFileSizeLimitMB() int64 + GetBranchContentSizeLimitMB() int64 + GetReleaseVersionPattern() string + GetCredsFromEnv() *view.DbCredentials + GetSamlCrt() string + GetSamlKey() string + GetADFSMetadataUrl() string + GetLdapServer() string + GetLdapUser() string + GetLdapUserPassword() string + GetLdapBaseDN() string + GetLdapOrganizationUnit() string + GetLdapSearchBase() string + GetBuildsCleanupSchedule() string + InsecureProxyEnabled() bool + GetMetricsGetterSchedule() string + MonitoringEnabled() bool + GetMinioAccessKeyId() string + GetMinioSecretAccessKey() string + GetMinioCrt() string + GetMinioEndpoint() string + GetMinioBucketName() string + IsMinioStorageActive() bool + GetMinioStorageCreds() *view.MinioStorageCreds + IsMinioStoreOnlyBuildResult() bool + GetExternalLinks() []string + GetDefaultWorkspaceId() string + GetCustomPathPrefixes() []string + GetAllowedHosts() []string +} + +func (g systemInfoServiceImpl) GetCredsFromEnv() *view.DbCredentials { + return &view.DbCredentials{ + Host: g.GetPGHost(), + Port: g.GetPGPort(), + Database: g.GetPGDB(), + Username: 
g.GetPGUser(), + Password: g.GetPGPassword(), + SSLMode: g.GetPGSSLMode(), + } +} + +func (s systemInfoServiceImpl) GetMinioStorageCreds() *view.MinioStorageCreds { + return &view.MinioStorageCreds{ + BucketName: s.GetMinioBucketName(), + IsActive: s.IsMinioStorageActive(), + Endpoint: s.GetMinioEndpoint(), + Crt: s.GetMinioCrt(), + AccessKeyId: s.GetMinioAccessKeyId(), + SecretAccessKey: s.GetMinioSecretAccessKey(), + IsOnlyForBuildResult: s.IsMinioStoreOnlyBuildResult(), + } +} + +func NewSystemInfoService() (SystemInfoService, error) { + s := &systemInfoServiceImpl{ + systemInfoMap: make(map[string]interface{})} + if err := s.Init(); err != nil { + log.Error("Failed to read system info: " + err.Error()) + return nil, err + } + return s, nil +} + +type systemInfoServiceImpl struct { + systemInfoMap map[string]interface{} +} + +func (g systemInfoServiceImpl) GetSystemInfo() *view.SystemInfo { + return &view.SystemInfo{ + BackendVersion: g.GetBackendVersion(), + ProductionMode: g.IsProductionMode(), + Notification: g.getSystemNotification(), + ExternalLinks: g.GetExternalLinks(), + } +} + +func (g systemInfoServiceImpl) Init() error { + err := g.setJwtPrivateKey() + if err != nil { + return err + } + g.setBasePath() + if err = g.setProductionMode(); err != nil { + return err + } + g.setBackendVersion() + g.setLogLevel() + g.setGitlabUrl() + g.setDiffServiceUrl() + g.setListenAddress() + g.setOriginAllowed() + g.setPGHost() + if err = g.setPGPort(); err != nil { + return err + } + g.setPGDB() + g.setPGUser() + g.setPGPassword() + g.setPGSSLMode() + g.setClientID() + g.setClientSecret() + g.setAPIHubUrl() + g.setPublishArchiveSizeLimitMB() + g.setPublishFileSizeLimitMB() + g.setBranchContentSizeLimitMB() + g.setReleaseVersionPattern() + g.setSamlCrt() + g.setSamlKey() + g.setADFSMetadataUrl() + g.setLdapServer() + g.setLdapUser() + g.setLdapUserPassword() + g.setLdapBaseDN() + g.setLdapOrganizationUnit() + g.setLdapSearchBase() + g.setSystemNotification() + 
g.setBuildsCleanupSchedule() + g.setInsecureProxy() + g.setMetricsGetterSchedule() + g.setMonitoringEnabled() + g.setMinioAccessKeyId() + g.setMinioSecretAccessKey() + g.setMinioCrt() + g.setMinioEndpoint() + g.setMinioBucketName() + g.setMinioStorageActive() + g.setMinioOnlyForBuildResult() + g.setExternalLinks() + g.setDefaultWorkspaceId() + g.setCustomPathPrefixes() + g.setAllowedHosts() + + return nil +} + +func (g systemInfoServiceImpl) setBasePath() { + g.systemInfoMap[BASE_PATH] = os.Getenv(BASE_PATH) + if g.systemInfoMap[BASE_PATH] == "" { + g.systemInfoMap[BASE_PATH] = "." + } +} + +func (g systemInfoServiceImpl) setJwtPrivateKey() error { + decodePrivateKey, err := base64.StdEncoding.DecodeString(os.Getenv(JWT_PRIVATE_KEY)) + if err != nil { + return fmt.Errorf("can't decode env JWT_PRIVATE_KEY. Error - %s", err.Error()) + } + if len(decodePrivateKey) == 0 { + return fmt.Errorf("env JWT_PRIVATE_KEY is not set or empty") + } + g.systemInfoMap[JWT_PRIVATE_KEY] = decodePrivateKey + return nil +} + +func (g systemInfoServiceImpl) setProductionMode() error { + envVal := os.Getenv(PRODUCTION_MODE) + if envVal == "" { + envVal = "false" + } + productionMode, err := strconv.ParseBool(envVal) + if err != nil { + return fmt.Errorf("failed to parse %v env value: %v", PRODUCTION_MODE, err.Error()) + } + g.systemInfoMap[PRODUCTION_MODE] = productionMode + return nil +} + +func (g systemInfoServiceImpl) setBackendVersion() { + version := os.Getenv(ARTIFACT_DESCRIPTOR_VERSION) + if version == "" { + version = "unknown" + } + g.systemInfoMap[ARTIFACT_DESCRIPTOR_VERSION] = version +} + +func (g systemInfoServiceImpl) GetBasePath() string { + return g.systemInfoMap[BASE_PATH].(string) +} + +func (g systemInfoServiceImpl) GetJwtPrivateKey() []byte { + return g.systemInfoMap[JWT_PRIVATE_KEY].([]byte) +} + +func (g systemInfoServiceImpl) IsProductionMode() bool { + return g.systemInfoMap[PRODUCTION_MODE].(bool) +} + +func (g systemInfoServiceImpl) GetBackendVersion() string { 
+ return g.systemInfoMap[ARTIFACT_DESCRIPTOR_VERSION].(string) +} + +func (g systemInfoServiceImpl) setLogLevel() { + g.systemInfoMap[LOG_LEVEL] = os.Getenv(LOG_LEVEL) +} + +func (g systemInfoServiceImpl) GetLogLevel() string { + return g.systemInfoMap[LOG_LEVEL].(string) +} + +func (g systemInfoServiceImpl) setGitlabUrl() { + gitlabUrl := os.Getenv(GITLAB_URL) + if gitlabUrl == "" { + gitlabUrl = "https://git.domain.com" + } + g.systemInfoMap[GITLAB_URL] = gitlabUrl +} + +func (g systemInfoServiceImpl) GetGitlabUrl() string { + return g.systemInfoMap[GITLAB_URL].(string) +} + +func (g systemInfoServiceImpl) setDiffServiceUrl() { + nodeServiceUrl := os.Getenv(DIFF_SERVICE_URL) + if nodeServiceUrl == "" { + nodeServiceUrl = "http://localhost:3000" + } + g.systemInfoMap[DIFF_SERVICE_URL] = nodeServiceUrl +} + +func (g systemInfoServiceImpl) GetDiffServiceUrl() string { + return g.systemInfoMap[DIFF_SERVICE_URL].(string) +} + +func (g systemInfoServiceImpl) setListenAddress() { + listenAddr := os.Getenv(LISTEN_ADDRESS) + if listenAddr == "" { + listenAddr = ":8080" + } + g.systemInfoMap[LISTEN_ADDRESS] = listenAddr +} + +func (g systemInfoServiceImpl) GetListenAddress() string { + return g.systemInfoMap[LISTEN_ADDRESS].(string) +} + +func (g systemInfoServiceImpl) setOriginAllowed() { + g.systemInfoMap[ORIGIN_ALLOWED] = os.Getenv(ORIGIN_ALLOWED) +} + +func (g systemInfoServiceImpl) GetOriginAllowed() string { + return g.systemInfoMap[ORIGIN_ALLOWED].(string) +} + +func (g systemInfoServiceImpl) setPGHost() { + host := os.Getenv(APIHUB_POSTGRESQL_HOST) + if host == "" { + host = "localhost" + } + g.systemInfoMap[APIHUB_POSTGRESQL_HOST] = host +} + +func (g systemInfoServiceImpl) GetPGHost() string { + return g.systemInfoMap[APIHUB_POSTGRESQL_HOST].(string) +} + +func (g systemInfoServiceImpl) setPGPort() error { + portStr := os.Getenv(APIHUB_POSTGRESQL_PORT) + var port int + var err error + if portStr == "" { + port = 5432 + } else { + port, err = strconv.Atoi(portStr) 
+ if err != nil { + return fmt.Errorf("failed to parse %v env value: %v", APIHUB_POSTGRESQL_PORT, err.Error()) + } + } + g.systemInfoMap[APIHUB_POSTGRESQL_PORT] = port + return nil +} + +func (g systemInfoServiceImpl) GetPGPort() int { + return g.systemInfoMap[APIHUB_POSTGRESQL_PORT].(int) +} + +func (g systemInfoServiceImpl) setPGDB() { + database := os.Getenv(APIHUB_POSTGRESQL_DB_NAME) + if database == "" { + database = "apihub" + } + g.systemInfoMap[APIHUB_POSTGRESQL_DB_NAME] = database +} + +func (g systemInfoServiceImpl) GetPGDB() string { + return g.systemInfoMap[APIHUB_POSTGRESQL_DB_NAME].(string) +} + +func (g systemInfoServiceImpl) setPGUser() { + user := os.Getenv(APIHUB_POSTGRESQL_USERNAME) + if user == "" { + user = "postgres" + } + g.systemInfoMap[APIHUB_POSTGRESQL_USERNAME] = user +} + +func (g systemInfoServiceImpl) GetPGUser() string { + return g.systemInfoMap[APIHUB_POSTGRESQL_USERNAME].(string) +} + +func (g systemInfoServiceImpl) setPGPassword() { + g.systemInfoMap[APIHUB_POSTGRESQL_PASSWORD] = os.Getenv(APIHUB_POSTGRESQL_PASSWORD) +} + +func (g systemInfoServiceImpl) GetPGPassword() string { + return g.systemInfoMap[APIHUB_POSTGRESQL_PASSWORD].(string) +} + +func (g systemInfoServiceImpl) setPGSSLMode() { + sslMode := os.Getenv(PG_SSL_MODE) + if sslMode == "" { + sslMode = "disable" + } + g.systemInfoMap[PG_SSL_MODE] = sslMode +} + +func (g systemInfoServiceImpl) GetPGSSLMode() string { + return g.systemInfoMap[PG_SSL_MODE].(string) +} + +func (g systemInfoServiceImpl) setClientID() { + g.systemInfoMap[CLIENT_ID] = os.Getenv(CLIENT_ID) +} + +func (g systemInfoServiceImpl) GetClientID() string { + return g.systemInfoMap[CLIENT_ID].(string) +} + +func (g systemInfoServiceImpl) setClientSecret() { + g.systemInfoMap[CLIENT_SECRET] = os.Getenv(CLIENT_SECRET) +} + +func (g systemInfoServiceImpl) GetClientSecret() string { + return g.systemInfoMap[CLIENT_SECRET].(string) +} + +func (g systemInfoServiceImpl) setAPIHubUrl() { + 
g.systemInfoMap[APIHUB_URL] = os.Getenv(APIHUB_URL) +} + +func (g systemInfoServiceImpl) GetAPIHubUrl() string { + return g.systemInfoMap[APIHUB_URL].(string) +} + +func (g systemInfoServiceImpl) setPublishArchiveSizeLimitMB() { + var bytesInMb int64 = 1048576 + publishArchiveSizeLimit, err := strconv.ParseInt(os.Getenv(PUBLISH_ARCHIVE_SIZE_LIMIT_MB), 0, 64) + if err != nil || publishArchiveSizeLimit == 0 { + publishArchiveSizeLimit = 50 + log.Warnf("PUBLISH_ARCHIVE_SIZE_LIMIT_MB has incorrect value, default=%d is going to be used", 50) + } + g.systemInfoMap[PUBLISH_ARCHIVE_SIZE_LIMIT_MB] = publishArchiveSizeLimit * bytesInMb +} + +func (g systemInfoServiceImpl) GetPublishArchiveSizeLimitMB() int64 { + return g.systemInfoMap[PUBLISH_ARCHIVE_SIZE_LIMIT_MB].(int64) +} + +func (g systemInfoServiceImpl) setPublishFileSizeLimitMB() { + var bytesInMb int64 = 1048576 + publishFileSizeLimit, err := strconv.ParseInt(os.Getenv(PUBLISH_FILE_SIZE_LIMIT_MB), 0, 64) + if err != nil || publishFileSizeLimit == 0 { + publishFileSizeLimit = 15 //15Mb + log.Warnf("PUBLISH_FILE_SIZE_LIMIT_MB has incorrect value, default=%d is going to be used", 15) + } + publishFileSizeLimit = publishFileSizeLimit * bytesInMb + g.systemInfoMap[PUBLISH_FILE_SIZE_LIMIT_MB] = publishFileSizeLimit +} + +func (g systemInfoServiceImpl) setBranchContentSizeLimitMB() { + var bytesInMb int64 = 1048576 + branchContentSizeLimit, err := strconv.ParseInt(os.Getenv(BRANCH_CONTENT_SIZE_LIMIT_MB), 0, 64) + if err != nil || branchContentSizeLimit == 0 { + branchContentSizeLimit = 50 + log.Warnf("BRANCH_CONTENT_SIZE_LIMIT_MB has incorrect value, default=%d is going to be used", 50) + } + g.systemInfoMap[BRANCH_CONTENT_SIZE_LIMIT_MB] = branchContentSizeLimit * bytesInMb +} + +func (g systemInfoServiceImpl) GetBranchContentSizeLimitMB() int64 { + return g.systemInfoMap[BRANCH_CONTENT_SIZE_LIMIT_MB].(int64) +} + +func (g systemInfoServiceImpl) GetPublishFileSizeLimitMB() int64 { + return 
g.systemInfoMap[PUBLISH_FILE_SIZE_LIMIT_MB].(int64)
}

// setReleaseVersionPattern stores the regexp used to validate release version
// names; defaults to the YYYY.Q pattern when the env variable is not set.
func (g systemInfoServiceImpl) setReleaseVersionPattern() {
	pattern := os.Getenv(RELEASE_VERSION_PATTERN)
	if pattern == "" {
		pattern = `^[0-9]{4}[.]{1}[1-4]{1}$`
	}
	g.systemInfoMap[RELEASE_VERSION_PATTERN] = pattern
}

// GetReleaseVersionPattern returns the configured release version regexp.
func (g systemInfoServiceImpl) GetReleaseVersionPattern() string {
	return g.systemInfoMap[RELEASE_VERSION_PATTERN].(string)
}

// setSamlCrt stores the SAML certificate from the environment as-is.
func (g systemInfoServiceImpl) setSamlCrt() {
	g.systemInfoMap[SAML_CRT] = os.Getenv(SAML_CRT)
}

func (g systemInfoServiceImpl) GetSamlCrt() string {
	return g.systemInfoMap[SAML_CRT].(string)
}

// setSamlKey stores the SAML private key from the environment as-is.
func (g systemInfoServiceImpl) setSamlKey() {
	g.systemInfoMap[SAML_KEY] = os.Getenv(SAML_KEY)
}

func (g systemInfoServiceImpl) GetSamlKey() string {
	return g.systemInfoMap[SAML_KEY].(string)
}

// setADFSMetadataUrl stores the ADFS federation metadata URL from the env.
func (g systemInfoServiceImpl) setADFSMetadataUrl() {
	g.systemInfoMap[ADFS_METADATA_URL] = os.Getenv(ADFS_METADATA_URL)
}

func (g systemInfoServiceImpl) GetADFSMetadataUrl() string {
	return g.systemInfoMap[ADFS_METADATA_URL].(string)
}

// setLdapServer stores the LDAP server URL; an empty value is logged as an
// error but still stored so later typed lookups do not panic.
func (g systemInfoServiceImpl) setLdapServer() {
	ldapServerUrl := os.Getenv(LDAP_SERVER)
	if ldapServerUrl == "" {
		log.Error("env LDAP_SERVER is not set or empty")
	}
	// Reuse the value already read instead of calling os.Getenv a second time.
	g.systemInfoMap[LDAP_SERVER] = ldapServerUrl
}

func (g systemInfoServiceImpl) GetLdapServer() string {
	return g.systemInfoMap[LDAP_SERVER].(string)
}

// setLdapUser stores the LDAP bind user; an empty value is logged as an
// error but still stored so later typed lookups do not panic.
func (g systemInfoServiceImpl) setLdapUser() {
	ldapUser := os.Getenv(LDAP_USER)
	if ldapUser == "" {
		log.Error("env LDAP_USER is not set or empty")
	}
	// Reuse the value already read instead of calling os.Getenv a second time.
	g.systemInfoMap[LDAP_USER] = ldapUser
}

func (g systemInfoServiceImpl) GetLdapUser() string {
	return g.systemInfoMap[LDAP_USER].(string)
}

func (g systemInfoServiceImpl) setLdapUserPassword() {
	ldapUserPassword := os.Getenv(LDAP_USER_PASSWORD)
	if ldapUserPassword == "" {
		log.Error("env LDAP_USER_PASSWORD is not set or empty")
	}
	g.systemInfoMap[LDAP_USER_PASSWORD] = 
os.Getenv(LDAP_USER_PASSWORD) +} + +func (g systemInfoServiceImpl) GetLdapUserPassword() string { + return g.systemInfoMap[LDAP_USER_PASSWORD].(string) +} + +func (g systemInfoServiceImpl) setLdapBaseDN() { + ldapBaseDn := os.Getenv(LDAP_BASE_DN) + if ldapBaseDn == "" { + log.Error("env LDAP_BASE_DN is not set or empty") + } + g.systemInfoMap[LDAP_BASE_DN] = ldapBaseDn +} + +func (g systemInfoServiceImpl) setLdapOrganizationUnit() { + ldapOU := os.Getenv(LDAP_ORGANIZATION_UNIT) + if ldapOU == "" { + log.Error("env LDAP_ORGANIZATION_UNIT is not set or empty") + } + g.systemInfoMap[LDAP_ORGANIZATION_UNIT] = ldapOU +} + +func (g systemInfoServiceImpl) setLdapSearchBase() { + ldapSearchBase := os.Getenv(LDAP_SEARCH_BASE) + if ldapSearchBase == "" { + log.Error("env LDAP_SEARCH_BASE is not set or empty") + } + g.systemInfoMap[LDAP_SEARCH_BASE] = ldapSearchBase +} + +func (g systemInfoServiceImpl) GetLdapBaseDN() string { + return g.systemInfoMap[LDAP_BASE_DN].(string) +} + +func (g systemInfoServiceImpl) GetLdapOrganizationUnit() string { + return g.systemInfoMap[LDAP_ORGANIZATION_UNIT].(string) +} + +func (g systemInfoServiceImpl) GetLdapSearchBase() string { + return g.systemInfoMap[LDAP_SEARCH_BASE].(string) +} + +func (g systemInfoServiceImpl) setSystemNotification() { + g.systemInfoMap[SYSTEM_NOTIFICATION] = os.Getenv(SYSTEM_NOTIFICATION) +} + +func (g systemInfoServiceImpl) getSystemNotification() string { + return g.systemInfoMap[SYSTEM_NOTIFICATION].(string) +} + +func (g systemInfoServiceImpl) GetBuildsCleanupSchedule() string { + return g.systemInfoMap[BUILDS_CLEANUP_SCHEDULE].(string) +} + +func (g systemInfoServiceImpl) setBuildsCleanupSchedule() { + g.systemInfoMap[BUILDS_CLEANUP_SCHEDULE] = "0 1 * * 0" // at 01:00 AM on Sunday +} + +func (g systemInfoServiceImpl) setInsecureProxy() { + envVal := os.Getenv(INSECURE_PROXY) + insecureProxy, err := strconv.ParseBool(envVal) + if err != nil { + log.Infof("environment variable %v has invalid value, using false 
value instead", INSECURE_PROXY) + insecureProxy = false + } + g.systemInfoMap[INSECURE_PROXY] = insecureProxy +} + +func (s systemInfoServiceImpl) InsecureProxyEnabled() bool { + return s.systemInfoMap[INSECURE_PROXY].(bool) +} + +func (g systemInfoServiceImpl) GetMetricsGetterSchedule() string { + return g.systemInfoMap[METRICS_GETTER_SCHEDULE].(string) +} + +func (g systemInfoServiceImpl) setMetricsGetterSchedule() { + g.systemInfoMap[METRICS_GETTER_SCHEDULE] = "* * * * *" // every minute +} + +func (g systemInfoServiceImpl) setMonitoringEnabled() { + envVal := os.Getenv(MONITORING_ENABLED) + monitoringEnabled, err := strconv.ParseBool(envVal) + if err != nil { + log.Infof("environment variable %v has invalid value, using false value instead", MONITORING_ENABLED) + monitoringEnabled = false + } + g.systemInfoMap[MONITORING_ENABLED] = monitoringEnabled +} + +func (s systemInfoServiceImpl) MonitoringEnabled() bool { + return s.systemInfoMap[MONITORING_ENABLED].(bool) +} + +func (g systemInfoServiceImpl) GetMinioAccessKeyId() string { + return g.systemInfoMap[STORAGE_SERVER_USERNAME].(string) +} + +func (g systemInfoServiceImpl) setMinioAccessKeyId() { + g.systemInfoMap[STORAGE_SERVER_USERNAME] = os.Getenv(STORAGE_SERVER_USERNAME) +} + +func (g systemInfoServiceImpl) GetMinioSecretAccessKey() string { + return g.systemInfoMap[STORAGE_SERVER_PASSWORD].(string) +} + +func (g systemInfoServiceImpl) setMinioSecretAccessKey() { + g.systemInfoMap[STORAGE_SERVER_PASSWORD] = os.Getenv(STORAGE_SERVER_PASSWORD) +} + +func (g systemInfoServiceImpl) GetMinioCrt() string { + return g.systemInfoMap[STORAGE_SERVER_CRT].(string) +} + +func (g systemInfoServiceImpl) setMinioCrt() { + g.systemInfoMap[STORAGE_SERVER_CRT] = os.Getenv(STORAGE_SERVER_CRT) +} + +func (g systemInfoServiceImpl) GetMinioEndpoint() string { + return g.systemInfoMap[STORAGE_SERVER_URL].(string) +} + +func (g systemInfoServiceImpl) setMinioEndpoint() { + g.systemInfoMap[STORAGE_SERVER_URL] = 
os.Getenv(STORAGE_SERVER_URL) +} + +func (g systemInfoServiceImpl) GetMinioBucketName() string { + return g.systemInfoMap[STORAGE_SERVER_BUCKET_NAME].(string) +} + +func (g systemInfoServiceImpl) setMinioBucketName() { + g.systemInfoMap[STORAGE_SERVER_BUCKET_NAME] = os.Getenv(STORAGE_SERVER_BUCKET_NAME) +} + +func (g systemInfoServiceImpl) setMinioStorageActive() { + envVal := os.Getenv(STORAGE_SERVER_ACTIVE) + if envVal == "" { + envVal = "false" + } + val, err := strconv.ParseBool(envVal) + if err != nil { + log.Errorf("failed to parse %v env value: %v. Value by default - false", STORAGE_SERVER_ACTIVE, err.Error()) + val = false + } + g.systemInfoMap[STORAGE_SERVER_ACTIVE] = val +} + +func (g systemInfoServiceImpl) IsMinioStorageActive() bool { + return g.systemInfoMap[STORAGE_SERVER_ACTIVE].(bool) +} + +func (g systemInfoServiceImpl) IsMinioStoreOnlyBuildResult() bool { + return g.systemInfoMap[STORAGE_SERVER_STORE_ONLY_BUILD_RESULT].(bool) +} + +func (g systemInfoServiceImpl) setMinioOnlyForBuildResult() { + envVal := os.Getenv(STORAGE_SERVER_STORE_ONLY_BUILD_RESULT) + if envVal == "" { + envVal = "false" + } + val, err := strconv.ParseBool(envVal) + if err != nil { + log.Errorf("failed to parse %v env value: %v. 
Value by default - false", STORAGE_SERVER_STORE_ONLY_BUILD_RESULT, err.Error()) + val = false + } + if !g.IsMinioStorageActive() && val == true { + val = false + log.Warnf("%s was set to false because %s had been set to false", STORAGE_SERVER_STORE_ONLY_BUILD_RESULT, STORAGE_SERVER_ACTIVE) + } + g.systemInfoMap[STORAGE_SERVER_STORE_ONLY_BUILD_RESULT] = val +} + +func (g systemInfoServiceImpl) GetExternalLinks() []string { + return g.systemInfoMap[EXTERNAL_LINKS].([]string) +} + +func (g systemInfoServiceImpl) setExternalLinks() { + externalLinksStr := os.Getenv(EXTERNAL_LINKS) + if externalLinksStr != "" { + g.systemInfoMap[EXTERNAL_LINKS] = strings.Split(externalLinksStr, ",") + } else { + g.systemInfoMap[EXTERNAL_LINKS] = []string{} + } +} + +func (g systemInfoServiceImpl) GetDefaultWorkspaceId() string { + return g.systemInfoMap[DEFAULT_WORKSPACE_ID].(string) +} + +func (g systemInfoServiceImpl) setDefaultWorkspaceId() { + g.systemInfoMap[DEFAULT_WORKSPACE_ID] = os.Getenv(DEFAULT_WORKSPACE_ID) +} + +func (g systemInfoServiceImpl) setCustomPathPrefixes() { + prefixes := make([]string, 0) + prefixesStr := os.Getenv("CUSTOM_PATH_PREFIXES") + if prefixesStr != "" { + prefixes = strings.Split(prefixesStr, ",") + } + g.systemInfoMap[CUSTOM_PATH_PREFIXES] = prefixes +} + +func (g systemInfoServiceImpl) GetCustomPathPrefixes() []string { + return g.systemInfoMap[CUSTOM_PATH_PREFIXES].([]string) +} + +func (g systemInfoServiceImpl) setAllowedHosts() { + hosts := make([]string, 0) + hostsStr := os.Getenv("ALLOWED_HOSTS") + if hostsStr != "" { + hosts = strings.Split(hostsStr, ",") + } + g.systemInfoMap[ALLOWED_HOSTS] = hosts +} + +func (g systemInfoServiceImpl) GetAllowedHosts() []string { + return g.systemInfoMap[ALLOWED_HOSTS].([]string) +} diff --git a/qubership-apihub-service/service/TemplateService.go b/qubership-apihub-service/service/TemplateService.go new file mode 100644 index 0000000..651c389 --- /dev/null +++ 
b/qubership-apihub-service/service/TemplateService.go @@ -0,0 +1,159 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "path" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type TemplateService interface { + GetFileTemplate(fileName string, fileType string) string +} + +func NewTemplateService() TemplateService { + return &templateServiceImpl{} +} + +type templateServiceImpl struct { +} + +func (t templateServiceImpl) GetFileTemplate(fileName string, fileType string) string { + ext := strings.ToUpper(path.Ext(fileName)) + switch fileType { + case string(view.OpenAPI30): + if ext == ".JSON" { + return getJsonOpenapiTemplate() + } + if ext == ".YAML" || ext == ".YML" { + return getYamlOpenapiTemplate() + } + case string(view.MD): + return getMarkdownTemplate() + case string(view.JsonSchema): + if ext == ".JSON" { + return getJsonJsonSchemaTemplate() + } + if ext == ".YAML" || ext == ".YML" { + return getYamlJsonSchemaTemplate() + } + } + return "" +} + +func getMarkdownTemplate() string { + return ` + # Title + + The beginning of an awesome article... 
+ ` +} + +func getJsonOpenapiTemplate() string { + // TODO: load template from resource file + return "" +} + +func getYamlOpenapiTemplate() string { + // TODO: load template from resource file + return "" +} + +func getYamlJsonSchemaTemplate() string { + // TODO: load template from resource file + return `invoice: 34843 +date : 2001-01-23 +bill-to: &id001 + given : Chris + family : Dumars + address: + lines: | + 458 Walkman Dr. + Suite #292 + city : Royal Oak + state : MI + postal : 48046 +ship-to: *id001 +product: +- sku : BL394D + quantity : 4 + description : Basketball + price : 450.00 +- sku : BL4438H + quantity : 1 + description : Super Hoop + price : 2392.00 +tax : 251.42 +total: 4443.52 +comments: + Late afternoon is best. + Backup contact is Nancy + Billsmer @ 338-4338. +` +} + +func getJsonJsonSchemaTemplate() string { + // TODO: load template from resource file + return `{ + "$id": "https://example.com/calendar.schema.json", + "$schema": "https://json-schema.org/draft/2020-12/schema", + "description": "A representation of an event", + "type": "object", + "required": [ "dtstart", "summary" ], + "properties": { + "dtstart": { + "type": "string", + "description": "Event starting time" + }, + "dtend": { + "type": "string", + "description": "Event ending time" + }, + "summary": { + "type": "string" + }, + "location": { + "type": "string" + }, + "url": { + "type": "string" + }, + "duration": { + "type": "string", + "description": "Event duration" + }, + "rdate": { + "type": "string", + "description": "Recurrence date" + }, + "rrule": { + "type": "string", + "description": "Recurrence rule" + }, + "category": { + "type": "string" + }, + "description": { + "type": "string" + }, + "geo": { + "$ref": "https://example.com/geographical-location.schema.json" + } + } +} +` +} diff --git a/qubership-apihub-service/service/TokenExpirationHandler.go b/qubership-apihub-service/service/TokenExpirationHandler.go new file mode 100644 index 0000000..114ce32 --- /dev/null +++ 
b/qubership-apihub-service/service/TokenExpirationHandler.go @@ -0,0 +1,214 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "crypto/tls" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/buraksezer/olric" + log "github.com/sirupsen/logrus" +) + +func NewTokenExpirationHandler(intRepo repository.GitIntegrationRepository, op cache.OlricProvider, systemInfoService SystemInfoService) client.TokenExpirationHandler { + handler := tokenExpirationHandlerImpl{ + intRepo: intRepo, + op: op, + isReadyWg: sync.WaitGroup{}, + systemInfoService: systemInfoService, + refreshMutex: &sync.RWMutex{}, + } + handler.isReadyWg.Add(1) + utils.SafeAsync(func() { + handler.initGCRevokedUsersDTopic() + }) + return &handler +} + +type tokenExpirationHandlerImpl struct { + intRepo repository.GitIntegrationRepository + op 
cache.OlricProvider + olricC *olric.Olric + gcRevokedUsersTopic *olric.DTopic + isReadyWg sync.WaitGroup + systemInfoService SystemInfoService + refreshMutex *sync.RWMutex +} + +func (t *tokenExpirationHandlerImpl) TokenExpired(userId string, integrationType view.GitIntegrationType) (string, *time.Time, error) { + userIntegration, err := t.intRepo.GetUserApiKey(integrationType, userId) + if err != nil { + return "", nil, err + } + if userIntegration == nil { + return "", nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: fmt.Sprintf("Failed to refresh gitlab access token: integration %v not found for user %v", integrationType, userId), + } + } + if userIntegration.RefreshToken == "" || userIntegration.RedirectUri == "" { + userIntegration.IsRevoked = true + _, err = t.intRepo.SaveUserApiKey(*userIntegration) + if err != nil { + return "", nil, err + } + err = t.PublishToGCRevokedUsersTopic(userId) + if err != nil { + return "", nil, err + } + return "", nil, &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IntegrationTokenRevoked, + Message: exception.IntegrationTokenRevokedMsg, + Params: map[string]interface{}{"integration": integrationType}, + } + } + + t.refreshMutex.Lock() + defer t.refreshMutex.Unlock() + + client := makeHttpClient() + + data := url.Values{} + data.Set("client_id", t.systemInfoService.GetClientID()) + data.Set("client_secret", t.systemInfoService.GetClientSecret()) + data.Set("redirect_uri", userIntegration.RedirectUri) + data.Set("grant_type", "refresh_token") + data.Set("refresh_token", userIntegration.RefreshToken) + encodedData := data.Encode() + req, err := http.NewRequest("POST", fmt.Sprintf("%s/oauth/token", t.systemInfoService.GetGitlabUrl()), strings.NewReader(encodedData)) + if err != nil { + return "", nil, fmt.Errorf("failed to create http request: %v", err.Error()) + } + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + 
req.Header.Add("Content-Length", strconv.Itoa(len(data.Encode()))) + response, err := client.Do(req) + if err != nil { + return "", nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: fmt.Sprintf("Failed to refresh gitlab access token: gitlab refresh request failed: %v", err), + } + } + defer response.Body.Close() + body, err := io.ReadAll(response.Body) + if err != nil { + return "", nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: fmt.Sprintf("Failed to refresh gitlab access token: response code = %d, failed to read response body: %v", response.StatusCode, err), + } + } + var oauthTokenResponse view.OAuthAccessResponse + if err := json.Unmarshal(body, &oauthTokenResponse); err != nil { + return "", nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: fmt.Sprintf("Failed to refresh gitlab access token: failed to unmarshal gitlab response: %v", err), + } + } + + if oauthTokenResponse.Error != "" || response.StatusCode != http.StatusOK { + actualUserIntegration, err := t.intRepo.GetUserApiKey(integrationType, userId) + if err != nil { + return "", nil, err + } + if actualUserIntegration == nil { + return "", nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: fmt.Sprintf("Failed to refresh gitlab access token: integration %v not found for user %v", integrationType, userId), + } + } + //check if refresh token has already been refreshed by other request + if actualUserIntegration.RefreshToken != userIntegration.RefreshToken { + return actualUserIntegration.AccessToken, &actualUserIntegration.ExpiresAt, nil + } + + if actualUserIntegration.FailedRefreshAttempts >= failedRefreshThreshold { + userIntegration.IsRevoked = true + _, err = t.intRepo.SaveUserApiKey(*userIntegration) + if err != nil { + return "", nil, err + } + err = t.PublishToGCRevokedUsersTopic(userId) + if err != nil { + return "", nil, err + } + return "", nil, &exception.CustomError{ + 
Status: http.StatusFailedDependency, + Code: exception.IntegrationTokenRevoked, + Message: exception.IntegrationTokenRevokedMsg, + Params: map[string]interface{}{"integration": integrationType}, + } + } + err = t.intRepo.AddFailedRefreshAttempt(integrationType, userId) + if err != nil { + return "", nil, err + } + return "", nil, fmt.Errorf("gitlab returned error to refresh request: status code = %d, error = %s", response.StatusCode, oauthTokenResponse.Error) + } + userIntegration.RefreshToken = oauthTokenResponse.RefreshToken + userIntegration.AccessToken = oauthTokenResponse.AccessToken + userIntegration.ExpiresAt = view.GetTokenExpirationDate(oauthTokenResponse.ExpiresIn) + userIntegration.FailedRefreshAttempts = 0 + + _, err = t.intRepo.SaveUserApiKey(*userIntegration) + if err != nil { + return "", nil, err + } + + //remove user from cache to update refreshed token + err = t.PublishToGCRevokedUsersTopic(userId) + if err != nil { + return "", nil, err + } + return userIntegration.AccessToken, &userIntegration.ExpiresAt, nil +} + +func (t *tokenExpirationHandlerImpl) initGCRevokedUsersDTopic() { + var err error + t.olricC = t.op.Get() + topicName := GCRevokedUsersTopicName + t.gcRevokedUsersTopic, err = t.olricC.NewDTopic(topicName, 10000, 1) + if err != nil { + log.Errorf("Failed to create DTopic: %s", err.Error()) + } +} + +func (t *tokenExpirationHandlerImpl) PublishToGCRevokedUsersTopic(userId string) error { + err := t.gcRevokedUsersTopic.Publish(userId) + if err != nil { + log.Errorf("Error while publishing the user git client data: %s", err) + return err + } + return nil +} + +func makeHttpClient() *http.Client { + tr := http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + cl := http.Client{Transport: &tr, Timeout: time.Second * 60} + return &cl +} diff --git a/qubership-apihub-service/service/TokenRevocationHandler.go b/qubership-apihub-service/service/TokenRevocationHandler.go new file mode 100644 index 0000000..8518f4a --- /dev/null 
+++ b/qubership-apihub-service/service/TokenRevocationHandler.go @@ -0,0 +1,119 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "net/http" + "sync" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/client" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/buraksezer/olric" + log "github.com/sirupsen/logrus" +) + +func NewTokenRevocationHandler(intRepo repository.GitIntegrationRepository, op cache.OlricProvider) client.TokenRevocationHandler { + trh := tokenRevocationHandlerImpl{ + intRepo: intRepo, + op: op, + isReadyWg: sync.WaitGroup{}, + } + trh.isReadyWg.Add(1) + utils.SafeAsync(func() { + trh.initGCRevokedUsersDTopic() + }) + return &trh +} + +type tokenRevocationHandlerImpl struct { + intRepo repository.GitIntegrationRepository + op cache.OlricProvider + olricC *olric.Olric + gcRevokedUsersTopic *olric.DTopic + isReadyWg sync.WaitGroup +} + +const failedRefreshThreshold = 5 + +func (t *tokenRevocationHandlerImpl) TokenRevoked(userId string, integrationType 
view.GitIntegrationType) error { + key, err := t.intRepo.GetUserApiKey(integrationType, userId) + if err != nil { + return err + } + key.IsRevoked = true + + _, err = t.intRepo.SaveUserApiKey(*key) + if err != nil { + return err + } + + err = t.PublishToGCRevokedUsersTopic(userId) + if err != nil { + return err + } + + return &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IntegrationTokenRevoked, + Message: exception.IntegrationTokenRevokedMsg, + Params: map[string]interface{}{"integration": integrationType}, + } +} + +func (t *tokenRevocationHandlerImpl) AuthFailed(userId string, integrationType view.GitIntegrationType) error { + key, err := t.intRepo.GetUserApiKey(integrationType, userId) + if err != nil { + return err + } + key.IsRevoked = true + + _, err = t.intRepo.SaveUserApiKey(*key) + if err != nil { + return err + } + + err = t.PublishToGCRevokedUsersTopic(userId) + if err != nil { + return err + } + return &exception.CustomError{ + Status: http.StatusFailedDependency, + Code: exception.IntegrationTokenAuthFailed, + Message: exception.IntegrationTokenAuthFailedMsg, + } +} + +func (t *tokenRevocationHandlerImpl) initGCRevokedUsersDTopic() { + var err error + t.olricC = t.op.Get() + topicName := GCRevokedUsersTopicName + t.gcRevokedUsersTopic, err = t.olricC.NewDTopic(topicName, 10000, 1) + if err != nil { + log.Errorf("Failed to create DTopic: %s", err.Error()) + } +} + +func (t *tokenRevocationHandlerImpl) PublishToGCRevokedUsersTopic(userId string) error { + err := t.gcRevokedUsersTopic.Publish(userId) + if err != nil { + log.Errorf("Error while publishing the user git client data: %s", err) + return err + } + return nil +} diff --git a/qubership-apihub-service/service/TransformationService.go b/qubership-apihub-service/service/TransformationService.go new file mode 100644 index 0000000..5fc4588 --- /dev/null +++ b/qubership-apihub-service/service/TransformationService.go @@ -0,0 +1,106 @@ +// Copyright 2024-2025 
NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type TransformationService interface { + GetDataForDocumentsTransformation(packageId, version string, filterReq view.DocumentsForTransformationFilterReq) (interface{}, error) +} + +func NewTransformationService(publishedRepo repository.PublishedRepository, operationRepo repository.OperationRepository) TransformationService { + return &transformationServiceImpl{publishedRepo: publishedRepo, operationRepo: operationRepo} +} + +type transformationServiceImpl struct { + publishedRepo repository.PublishedRepository + operationRepo repository.OperationRepository +} + +func (t transformationServiceImpl) GetDataForDocumentsTransformation(packageId, version string, filterReq view.DocumentsForTransformationFilterReq) (interface{}, error) { + versionEnt, err := t.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: 
exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": version}, + } + } + + searchQuery := entity.ContentForDocumentsTransformationSearchQueryEntity{ + Limit: filterReq.Limit, + Offset: filterReq.Offset, + DocumentTypesFilter: view.GetDocumentTypesForApiType(filterReq.ApiType), + OperationGroup: view.MakeOperationGroupId(packageId, versionEnt.Version, versionEnt.Revision, filterReq.ApiType, filterReq.FilterByOperationGroup), + } + existingGroup, err := t.operationRepo.GetOperationGroup(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, filterReq.ApiType, filterReq.FilterByOperationGroup) + if err != nil { + return nil, err + } + if existingGroup == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.OperationGroupNotFound, + Message: exception.OperationGroupNotFoundMsg, + Params: map[string]interface{}{"groupName": filterReq.FilterByOperationGroup}, + } + } + + operationByGroupEnts, err := t.operationRepo.GetGroupedOperations(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, filterReq.ApiType, filterReq.FilterByOperationGroup, view.OperationListReq{}) + if err != nil { + return nil, err + } + operationIdsByGroupName := entity.MakeOperationIdsSlice(operationByGroupEnts) + versionDocuments := make([]view.DocumentForTransformationView, 0) + content, err := t.publishedRepo.GetVersionRevisionContentForDocumentsTransformation(packageId, versionEnt.Version, versionEnt.Revision, searchQuery) + if err != nil { + return nil, err + } + for _, versionDocumentEnt := range content { + transformationView := *entity.MakeDocumentForTransformationView(&versionDocumentEnt) + transformationView.IncludedOperationIds = getCommonOperationFromGroupAndDocumentOperations(operationIdsByGroupName, transformationView) + versionDocuments = append(versionDocuments, transformationView) + } + + return &view.DocumentsForTransformationView{Documents: versionDocuments}, nil +} + +func 
getCommonOperationFromGroupAndDocumentOperations(operationIdsByGroupName []string, document view.DocumentForTransformationView) []string { + commonOperations := make([]string, 0) + hash := make(map[string]struct{}) + + for _, v := range operationIdsByGroupName { + hash[v] = struct{}{} + } + + for _, v := range document.IncludedOperationIds { + if _, ok := hash[v]; ok { + commonOperations = append(commonOperations, v) + } + } + + return commonOperations +} diff --git a/qubership-apihub-service/service/TransitionService.go b/qubership-apihub-service/service/TransitionService.go new file mode 100644 index 0000000..43de8f7 --- /dev/null +++ b/qubership-apihub-service/service/TransitionService.go @@ -0,0 +1,334 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "fmt" + "net/http" + "strings" + + context2 "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" +) + +type TransitionService interface { + MoveOrRenamePackage(userCtx context2.SecurityContext, fromId string, toId string, overwriteHistory bool) (string, error) + GetMoveStatus(id string) (*view.TransitionStatus, error) + ListCompletedActivities(offset int, limit int) ([]view.TransitionStatus, error) + ListPackageTransitions() ([]view.PackageTransition, error) +} + +func NewTransitionService(transRepo repository.TransitionRepository, pubRepo repository.PublishedRepository) TransitionService { + return &transitionServiceImpl{transRepo: transRepo, pubRepo: pubRepo} +} + +type transitionServiceImpl struct { + transRepo repository.TransitionRepository + pubRepo repository.PublishedRepository +} + +func (p transitionServiceImpl) MoveOrRenamePackage(userCtx context2.SecurityContext, fromId string, toId string, overwriteHistory bool) (string, error) { + if fromId == toId { + return "", fmt.Errorf("incorrect input: from==to") + } + + fromPackage, err := p.pubRepo.GetPackage(fromId) + if err != nil { + return "", err + } + if fromPackage == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.FromPackageNotFound, + Message: exception.FromPackageNotFoundMsg, + Params: map[string]interface{}{"packageId": fromId}, + } + } + + toPackage, err := p.pubRepo.GetPackage(toId) + if err != nil { + return "", err + } 
+ if toPackage != nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ToPackageExists, + Message: exception.ToPackageExistsMsg, + Params: map[string]interface{}{"packageId": toId}, + } + } + + if !overwriteHistory { + redirectPackageId, err := p.transRepo.GetNewPackageId(toId) + if err != nil { + return "", err + } + if redirectPackageId != "" { + oldIds, err := p.transRepo.GetOldPackageIds(fromId) + if err != nil { + return "", err + } + isRenameBack := false + for _, oldId := range oldIds { + if oldId == toId { // rename package back, it's allowed case + isRenameBack = true + break + } + } + if !isRenameBack { + // new package id is going to overlap existing one which is not allowed + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ToPackageRedirectExists, + Message: exception.ToPackageRedirectExistsMsg, + Params: map[string]interface{}{"packageId": toId, "newPackageId": redirectPackageId}, + } + } + } + } + + fromParts := strings.Split(fromId, ".") + + toParts := strings.Split(toId, ".") + toWorkspace := len(toParts) == 1 + + isMove := false + isRename := false + + if len(fromParts) != len(toParts) { + isMove = true + isRename = fromParts[len(fromParts)-1] != toParts[len(toParts)-1] + } else { + for i, fromPart := range fromParts { + toPart := toParts[i] + if i == len(fromParts)-1 { + // last id segment, i.e. alias + if fromPart != toPart { + isRename = true + } + } else { + // intermediate id segment, i.e. 
one of parent ids + if fromPart != toPart { + isMove = true + } + } + } + } + + if isMove && !toWorkspace { + toParentId := strings.Join(toParts[:len(toParts)-1], ".") + toParentPackage, err := p.pubRepo.GetPackage(toParentId) + if err != nil { + return "", err + } + if toParentPackage == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ToParentPackageNotFound, + Message: exception.ToParentPackageNotFoundMsg, + Params: map[string]interface{}{"packageId": toParentId}, + } + } + } + + /* + allowed moves(package id will be changed in all cases): + workspace -> workspace (rename, i.e. change alias) + workspace -> group (convert workspace to group, i.e. add parent) + + group -> group (change alias => rename, change parent => move) + group -> workspace (convert group to workspace, i.e. remove parent) + + package -> package (change alias => rename, change parent => move) + + dashboard -> dashboard (change alias => rename, change parent => move) + */ + + id := uuid.New().String() + trType := "" + + switch fromPackage.Kind { + case entity.KIND_WORKSPACE: + if toWorkspace { + trType = "rename_workspace" + } else { + trType = "convert_workspace_to_group" + } + err = p.transRepo.TrackTransitionStarted(userCtx, id, trType, fromId, toId) + if err != nil { + return "", fmt.Errorf("failed to track transition action: %s", err) + } + // TODO: implement async job that will take non-finished transition tasks from DB instead of a direct call + utils.SafeAsync(func() { + objAffected, err := p.transRepo.MoveGroupingPackage(fromId, toId) + if err != nil { + err = p.transRepo.TrackTransitionFailed(id, err.Error()) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } else { + err = p.transRepo.TrackTransitionCompleted(id, objAffected) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } + }) + return id, nil + case entity.KIND_PACKAGE: + if toWorkspace { + return "", 
fmt.Errorf("convertation of package %s to workspace %s is not supported", fromId, toId) + } else { + if isMove && isRename { + trType = "move_and_rename_package" + } else if isMove { + trType = "move_package" + } else if isRename { + trType = "rename_package" + } + err = p.transRepo.TrackTransitionStarted(userCtx, id, trType, fromId, toId) + if err != nil { + return "", fmt.Errorf("failed to track transition action: %s", err) + } + // TODO: implement async job that will take non-finished transition tasks from DB instead of a direct call + utils.SafeAsync(func() { + objAffected, err := p.transRepo.MovePackage(fromId, toId, overwriteHistory) + if err != nil { + err = p.transRepo.TrackTransitionFailed(id, err.Error()) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } else { + err = p.transRepo.TrackTransitionCompleted(id, objAffected) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } + }) + return id, nil + } + case entity.KIND_GROUP: + if toWorkspace { + trType = "convert_group_to_workspace" + } else { + if isMove && isRename { + trType = "move_and_rename_group" + } else if isMove { + trType = "move_group" + } else if isRename { + trType = "rename_group" + } + } + err = p.transRepo.TrackTransitionStarted(userCtx, id, trType, fromId, toId) + if err != nil { + return "", fmt.Errorf("failed to track transition action: %s", err) + } + // TODO: implement async job that will take non-finished transition tasks from DB instead of a direct call + utils.SafeAsync(func() { + objAffected, err := p.transRepo.MoveGroupingPackage(fromId, toId) + if err != nil { + err = p.transRepo.TrackTransitionFailed(id, err.Error()) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } else { + err = p.transRepo.TrackTransitionCompleted(id, objAffected) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } + }) + return id, nil + case entity.KIND_DASHBOARD: + if 
toWorkspace { + return "", fmt.Errorf("convertation of dashboard %s to workspace %s is not supported", fromId, toId) + } else { + if isMove && isRename { + trType = "move_and_rename_dashboard" + } else if isMove { + trType = "move_dashboard" + } else if isRename { + trType = "rename_dashboard" + } + err = p.transRepo.TrackTransitionStarted(userCtx, id, trType, fromId, toId) + if err != nil { + return "", fmt.Errorf("failed to track transition action: %s", err) + } + // TODO: implement async job that will take non-finished transition tasks from DB instead of a direct call + utils.SafeAsync(func() { + objAffected, err := p.transRepo.MovePackage(fromId, toId, overwriteHistory) + if err != nil { + err = p.transRepo.TrackTransitionFailed(id, err.Error()) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } else { + err = p.transRepo.TrackTransitionCompleted(id, objAffected) + if err != nil { + log.Errorf("failed to track transition action: %s", err) + } + } + }) + return id, nil + } + default: + return "", fmt.Errorf("unsupported 'from' package kind=%s", fromPackage.Kind) + } +} + +func (p transitionServiceImpl) GetMoveStatus(id string) (*view.TransitionStatus, error) { + ent, err := p.transRepo.GetTransitionStatus(id) + if err != nil { + return nil, err + } + if ent == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.TransitionActivityNotFound, + Message: exception.TransitionActivityNotFoundMsg, + Params: map[string]interface{}{"id": id}, + } + } + return entity.MakeTransitionStatusView(ent), nil +} + +func (p transitionServiceImpl) ListCompletedActivities(completedSerialOffset int, limit int) ([]view.TransitionStatus, error) { + entities, err := p.transRepo.ListCompletedTransitions(completedSerialOffset, limit) + if err != nil { + return nil, err + } + result := make([]view.TransitionStatus, len(entities)) + for i := range entities { + result[i] = *entity.MakeTransitionStatusView(&entities[i]) 
+ } + return result, nil +} + +func (p transitionServiceImpl) ListPackageTransitions() ([]view.PackageTransition, error) { + entities, err := p.transRepo.ListPackageTransitions() + if err != nil { + return nil, err + } + result := make([]view.PackageTransition, len(entities)) + for i := range entities { + result[i] = *entity.MakePackageTransitionView(&entities[i]) + } + return result, nil +} diff --git a/qubership-apihub-service/service/UserService.go b/qubership-apihub-service/service/UserService.go new file mode 100644 index 0000000..adfd518 --- /dev/null +++ b/qubership-apihub-service/service/UserService.go @@ -0,0 +1,559 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "crypto/sha256" + "fmt" + "net/http" + "strconv" + "strings" + + "github.com/go-ldap/ldap" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/gosimple/slug" + log "github.com/sirupsen/logrus" + "golang.org/x/crypto/bcrypt" +) + +type UserService interface { + GetUsers(usersListReq view.UsersListReq) (*view.Users, error) + GetUsersByIds(userIds []string) ([]view.User, error) + GetUsersIdMap(userIds []string) (map[string]view.User, error) + GetUsersEmailMap(emails []string) (map[string]view.User, error) + GetUserFromDB(userId string) (*view.User, error) + GetUserByEmail(email string) (*view.User, error) + GetOrCreateUserForIntegration(user view.User, integration view.ExternalIntegration) (*view.User, error) + CreateInternalUser(internalUser *view.InternalUser) (*view.User, error) + StoreUserAvatar(id string, avatar []byte) error + GetUserAvatar(userId string) (*view.UserAvatar, error) + AuthenticateUser(email string, password string) (*view.User, error) + SearchUsersInLdap(ldapSearch view.LdapSearchFilterReq, withAvatars bool) (*view.LdapUsers, error) +} + +func NewUserService(repo repository.UserRepository, gitClientProvider GitClientProvider, systemInfoService SystemInfoService, privateUserPackageService PrivateUserPackageService) UserService { + return &usersServiceImpl{ + repo: repo, + gitClientProvider: gitClientProvider, + systemInfoService: systemInfoService, + privateUserPackageService: privateUserPackageService, + } +} + +type usersServiceImpl struct { + repo repository.UserRepository + gitClientProvider GitClientProvider + systemInfoService SystemInfoService + privateUserPackageService PrivateUserPackageService +} + +func (u 
usersServiceImpl) saveUserAvatar(userAvatar *view.UserAvatar) error {
	return u.repo.SaveUserAvatar(entity.MakeUserAvatarEntity(userAvatar))
}

// GetUserAvatar returns the stored avatar for userId. When no avatar exists in
// the DB it falls back to an LDAP lookup by sAMAccountName; a nil result with a
// nil error means no avatar is available from either source.
func (u usersServiceImpl) GetUserAvatar(userId string) (*view.UserAvatar, error) {
	userAvatarEntity, err := u.repo.GetUserAvatar(userId)
	if err != nil {
		return nil, err
	}
	if userAvatarEntity != nil {
		userAvatar := *entity.MakeUserAvatarView(userAvatarEntity)
		return &userAvatar, nil
	}
	// Not in DB: try LDAP (with avatar bytes requested).
	usersFromLdap, err := u.SearchUsersInLdap(view.LdapSearchFilterReq{FilterToValue: map[string]string{view.SAMAccountName: userId}, Limit: 1}, true)
	if err != nil {
		return nil, err
	}
	if usersFromLdap == nil || len(usersFromLdap.Users) == 0 {
		return nil, nil
	}
	return &view.UserAvatar{
		Id:     userId,
		Avatar: usersFromLdap.Users[0].Avatar,
	}, nil
}

// StoreUserAvatar persists the avatar for id, skipping the write when the
// SHA-256 checksum matches what is already stored.
func (u usersServiceImpl) StoreUserAvatar(id string, avatar []byte) error {
	newAvatarChecksum := sha256.Sum256(avatar)
	avatarChanged, err := u.avatarChanged(id, newAvatarChecksum)
	if err != nil {
		// %w keeps the underlying error inspectable via errors.Is/As.
		return fmt.Errorf("failed to get user avatar: %w", err)
	}
	if avatarChanged {
		err = u.saveUserAvatar(&view.UserAvatar{Id: id, Avatar: avatar, Checksum: newAvatarChecksum})
		if err != nil {
			return err
		}
	}
	return nil
}

// avatarChanged reports whether the stored avatar is absent or has a different
// checksum than newChecksum.
func (u usersServiceImpl) avatarChanged(id string, newChecksum [32]byte) (bool, error) {
	avatarFromDB, err := u.GetUserAvatar(id)
	if err != nil {
		return false, err
	}
	return avatarFromDB == nil || avatarFromDB.Checksum != newChecksum, nil
}

// GetUsers returns users matching the request filter, merging LDAP search hits
// (listed first) with DB users, de-duplicated by email.
func (u usersServiceImpl) GetUsers(usersListReq view.UsersListReq) (*view.Users, error) {
	result := make([]view.User, 0)
	existingEmailsSet := map[string]struct{}{}

	if usersListReq.Filter != "" {
		searchResults, err := u.SearchUsersInLdap(
			view.LdapSearchFilterReq{
				FilterToValue: map[string]string{view.DisplayName: usersListReq.Filter,
					view.Surname: usersListReq.Filter,
					view.Mail:    usersListReq.Filter},
				Limit: usersListReq.Limit,
+ }, + false) + if err != nil { + return nil, err + } + if searchResults != nil { + for _, ldapUser := range searchResults.Users { + user := view.User{ + Id: ldapUser.Id, + Name: ldapUser.Name, + Email: strings.ToLower(ldapUser.Email), + AvatarUrl: fmt.Sprintf("/api/v2/users/%s/profile/avatar", ldapUser.Id), + } + result = append(result, user) + existingEmailsSet[user.Email] = struct{}{} + } + } + } + + userEntities, err := u.repo.GetUsers(usersListReq) + if err != nil { + return nil, err + } + + for _, userEntity := range userEntities { + if _, exists := existingEmailsSet[userEntity.Email]; exists { + continue + } + result = append(result, *entity.MakeUserV2View(&userEntity)) + } + + return &view.Users{Users: result}, nil +} + +func (u usersServiceImpl) SearchUsersInLdap(ldapSearchFilterReq view.LdapSearchFilterReq, withAvatars bool) (*view.LdapUsers, error) { + if len(ldapSearchFilterReq.FilterToValue) == 0 { + return nil, nil + } + ldapServerUrl := u.systemInfoService.GetLdapServer() + if ldapServerUrl == "" { + return nil, nil + } + ld, err := ldap.DialURL(ldapServerUrl) + defer ld.Close() + if err != nil { + log.Debugf("[ldap.DialURL()] err -%s", err.Error()) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.LdapConnectionIsNotCorrect, + Message: exception.LdapConnectionIsNotCorrectMsg, + Params: map[string]interface{}{"server": ldapServerUrl, "error": err.Error()}, + } + } + err = ld.Bind( + fmt.Sprintf("cn=%s,%s,%s", + u.systemInfoService.GetLdapUser(), + u.systemInfoService.GetLdapOrganizationUnit(), + u.systemInfoService.GetLdapBaseDN()), + u.systemInfoService.GetLdapUserPassword()) + if err != nil { + log.Debugf("[ ld.Bind()] err -%s", err.Error()) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.LdapConnectionIsNotAllowed, + Message: exception.LdapConnectionIsNotAllowedMsg, + Params: map[string]interface{}{"server": ldapServerUrl, "error": err.Error()}, + } + 
} + + var subFilter string + for attribute, value := range ldapSearchFilterReq.FilterToValue { + subFilter += fmt.Sprintf("(%s=%s*)", attribute, value) + } + mainFilter := fmt.Sprintf("(&(objectClass=user)(|%s))", subFilter) + searchBase := u.systemInfoService.GetLdapSearchBase() + attributes := []string{view.Mail, view.DisplayName, view.ThumbnailPhoto, view.SAMAccountName} + pagingControl := ldap.NewControlPaging(uint32(ldapSearchFilterReq.Limit)) + controls := []ldap.Control{pagingControl} + searchReq := ldap.NewSearchRequest( + searchBase, + ldap.ScopeWholeSubtree, ldap.DerefAlways, ldapSearchFilterReq.Limit, 0, false, + mainFilter, + attributes, + controls, + ) + result, err := ld.Search(searchReq) + if err != nil { + log.Debugf("[ld.Search() ]failed to query LDAP: %s", err.Error()) + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.LdapSearchFailed, + Message: exception.LdapSearchFailedMsg, + Params: map[string]interface{}{"server": ldapServerUrl, "error": err.Error()}, + } + } + users := make([]view.LdapUser, 0) + for _, entry := range result.Entries { + user := view.LdapUser{} + for _, attribute := range entry.Attributes { + switch attribute.Name { + case view.Mail: + user.Email = attribute.Values[0] + case view.DisplayName: + user.Name = attribute.Values[0] + case view.SAMAccountName: + user.Id = attribute.Values[0] + case view.ThumbnailPhoto: + if withAvatars { + user.Avatar = attribute.ByteValues[0] + } + default: + + } + } + users = append(users, user) + } + + return &view.LdapUsers{Users: users}, nil +} + +func (u usersServiceImpl) GetUsersByIds(userIds []string) ([]view.User, error) { + result := make([]view.User, 0) + userEntities, err := u.repo.GetUsersByIds(userIds) + if err != nil { + return nil, err + } + for _, userEntity := range userEntities { + result = append(result, *entity.MakeUserView(&userEntity)) + } + return result, nil +} + +func (u usersServiceImpl) GetUsersIdMap(userIds []string) 
(map[string]view.User, error) { + result := make(map[string]view.User, 0) + userEntities, err := u.repo.GetUsersByIds(userIds) + if err != nil { + return nil, err + } + for _, userEntity := range userEntities { + result[userEntity.Id] = *entity.MakeUserView(&userEntity) + } + return result, nil +} + +func (u usersServiceImpl) GetUsersEmailMap(emails []string) (map[string]view.User, error) { + result := make(map[string]view.User, 0) + for index := range emails { + emails[index] = strings.ToLower(emails[index]) + } + userEntities, err := u.repo.GetUsersByEmails(emails) + if err != nil { + return nil, err + } + for _, userEntity := range userEntities { + result[userEntity.Email] = *entity.MakeUserView(&userEntity) + } + return result, nil +} + +func (u usersServiceImpl) GetUserFromDB(userId string) (*view.User, error) { + userEntity, err := u.repo.GetUserById(userId) + + if err != nil { + return nil, fmt.Errorf("failed to get user from DB: %v", err) + } + if userEntity != nil { + return entity.MakeUserView(userEntity), nil + } + return nil, nil + +} + +func (u usersServiceImpl) GetUserByEmail(email string) (*view.User, error) { + userEntity, err := u.repo.GetUserByEmail(email) + + if err != nil { + return nil, err + } + if userEntity != nil { + return entity.MakeUserView(userEntity), nil + } + return nil, nil +} + +func (u usersServiceImpl) GetOrCreateUserForIntegration(externalUser view.User, integration view.ExternalIntegration) (*view.User, error) { + if externalUser.Email == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "email"}, + } + } + externalId := view.GetIntegrationExternalId(externalUser, integration) + if externalId == "" { + return nil, fmt.Errorf("external id is missing for user in '%v' integration", integration) + } + externalIdentity, err := u.repo.GetUserExternalIdentity(string(integration), externalId) + 
if err != nil { + return nil, err + } + if externalIdentity == nil { + return u.createExternalUser(externalUser, integration) + } + userEnt, err := u.repo.GetUserById(externalIdentity.InternalId) + if err != nil { + return nil, err + } + if userEnt == nil { + return u.createExternalUser(externalUser, integration) + } + if len(userEnt.Password) != 0 { + err = u.repo.ClearUserPassword(userEnt.Id) + if err != nil { + return nil, err + } + } + userEnt, err = u.updateExternalUserInfo(userEnt, externalUser) + if err != nil { + return nil, err + } + + return entity.MakeUserView(userEnt), nil +} + +func (u usersServiceImpl) createExternalUser(externalUser view.User, integration view.ExternalIntegration) (*view.User, error) { + externalId := view.GetIntegrationExternalId(externalUser, integration) + if externalId == "" { + return nil, fmt.Errorf("external id is missing for user in %v integration", integration) + } + existingUser, err := u.repo.GetUserByEmail(externalUser.Email) + if err != nil { + return nil, err + } + if existingUser != nil { + err = u.repo.UpdateUserExternalIdentity(string(integration), externalId, existingUser.Id) + if err != nil { + return nil, err + } + if len(existingUser.Password) != 0 { + err = u.repo.ClearUserPassword(existingUser.Id) + if err != nil { + return nil, err + } + } + existingUser, err = u.updateExternalUserInfo(existingUser, externalUser) + if err != nil { + return nil, err + } + return entity.MakeUserView(existingUser), nil + } + + existingUser, err = u.repo.GetUserById(externalId) + if err != nil { + return nil, err + } + if existingUser != nil { + externalUser.Id, err = u.createUniqueUserId(externalUser.Email) + if err != nil { + return nil, err + } + } + if externalUser.Name == "" { + externalUser.Name = externalUser.Email + } + + err = u.saveExternalUserToDB(&externalUser, integration, externalId) + if err != nil { + return nil, err + } + return &externalUser, nil +} + +func (u usersServiceImpl) updateExternalUserInfo(existingUser 
*entity.UserEntity, externalUser view.User) (*entity.UserEntity, error) { + userInfoChanged := false + //update name only if user was created without a display name + if existingUser.Username == existingUser.Email && externalUser.Name != existingUser.Username { + existingUser.Username = externalUser.Name + userInfoChanged = true + } + if existingUser.AvatarUrl == "" && externalUser.AvatarUrl != "" { + existingUser.AvatarUrl = externalUser.AvatarUrl + userInfoChanged = true + } + if userInfoChanged { + err := u.repo.UpdateUserInfo(existingUser) + if err != nil { + return nil, err + } + } + return existingUser, nil +} + +func (u usersServiceImpl) saveExternalUserToDB(user *view.User, integration view.ExternalIntegration, externalId string) error { + userPrivatePackageId, err := u.privateUserPackageService.GenerateUserPrivatePackageId(user.Id) + if err != nil { + return err + } + userEntity := entity.MakeExternalUserEntity(user, userPrivatePackageId) + externalIdentityEnt := &entity.ExternalIdentityEntity{Provider: string(integration), InternalId: user.Id, ExternalId: externalId} + return u.repo.SaveExternalUser(userEntity, externalIdentityEnt) +} + +func (u usersServiceImpl) CreateInternalUser(internalUser *view.InternalUser) (*view.User, error) { + //bcrypt max allowed password len + if len([]byte(internalUser.Password)) > 72 { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PasswordTooLong, + Message: exception.PasswordTooLongMsg, + } + } + err := u.validateEmail(internalUser.Email) + if err != nil { + return nil, err + } + + internalUser.Id, err = u.createUniqueUserId(internalUser.Email) + if err != nil { + return nil, err + } + + if internalUser.Name == "" { + internalUser.Name = internalUser.Email + } + passwordHash, err := createBcryptHashedPassword(internalUser.Password) + if err != nil { + return nil, err + } + userPrivatePackageId := internalUser.PrivateWorkspaceId + if internalUser.PrivateWorkspaceId == "" { + 
userPrivatePackageId, err = u.privateUserPackageService.GenerateUserPrivatePackageId(internalUser.Id) + if err != nil { + return nil, err + } + } else { + privatePackageIdIsTaken, err := u.privateUserPackageService.PrivatePackageIdIsTaken(internalUser.PrivateWorkspaceId) + if err != nil { + return nil, err + } + if privatePackageIdIsTaken { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PrivateWorkspaceIdAlreadyTaken, + Message: exception.PrivateWorkspaceIdAlreadyTakenMsg, + Params: map[string]interface{}{"id": internalUser.PrivateWorkspaceId}, + } + } + } + + userEntity := entity.MakeInternalUserEntity(internalUser, passwordHash, userPrivatePackageId) + saved, err := u.repo.SaveInternalUser(userEntity) + if err != nil { + return nil, err + } + if !saved { + return nil, &exception.CustomError{ + Status: http.StatusInternalServerError, + Message: "Failed to create internal user", + } + } + return entity.MakeUserV2View(userEntity), nil +} + +func (u usersServiceImpl) validateEmail(email string) error { + if email == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmptyParameter, + Message: exception.EmptyParameterMsg, + Params: map[string]interface{}{"param": "email"}, + } + } + existingUser, err := u.repo.GetUserByEmail(email) + if err != nil { + return err + } + if existingUser != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.EmailAlreadyTaken, + Message: exception.EmailAlreadyTakenMsg, + Params: map[string]interface{}{"email": email}, + } + } + return nil +} + +func (u usersServiceImpl) createUniqueUserId(email string) (string, error) { + userId := slug.Make(email) + existingUser, err := u.repo.GetUserById(userId) + if err != nil { + return "", err + } + if existingUser != nil { + i := 1 + for existingUser != nil { + userId = slug.Make(email + "-" + strconv.Itoa(i)) + existingUser, err = u.repo.GetUserById(userId) + if err != nil { + 
return "", err + } + i++ + } + } + return userId, nil +} + +func (u usersServiceImpl) AuthenticateUser(email string, password string) (*view.User, error) { + userEntity, err := u.repo.GetUserByEmail(email) + if err != nil { + return nil, err + } + if password == "" || userEntity == nil || len(userEntity.Password) == 0 { + log.Debugf("Local authentication failed for %v", email) + return nil, fmt.Errorf("invalid credentials") + } + err = bcrypt.CompareHashAndPassword(userEntity.Password, []byte(password)) + if err != nil { + log.Debugf("Local authentication failed for %v", email) + return nil, fmt.Errorf("invalid credentials") + } + + return entity.MakeUserView(userEntity), nil +} + +func createBcryptHashedPassword(password string) ([]byte, error) { + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + return hashedPassword, err +} diff --git a/qubership-apihub-service/service/VersionService.go b/qubership-apihub-service/service/VersionService.go new file mode 100644 index 0000000..da5785f --- /dev/null +++ b/qubership-apihub-service/service/VersionService.go @@ -0,0 +1,2430 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "bufio" + "bytes" + "encoding/csv" + "fmt" + "net/http" + "path" + "regexp" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/crypto" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/entity" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +type VersionService interface { + SetBuildService(buildService BuildService) + + GetPackageVersionContent_deprecated(packageId string, versionName string, includeSummary bool, includeOperations bool, includeGroups bool) (*view.VersionContent_deprecated, error) + GetPackageVersionContent(packageId string, versionName string, includeSummary bool, includeOperations bool, includeGroups bool) (*view.VersionContent, error) + GetPackageVersionsView_deprecated(req view.VersionListReq) (*view.PublishedVersionsView_deprecated_v2, error) + GetPackageVersionsView(req view.VersionListReq) (*view.PublishedVersionsView, error) + DeleteVersion(ctx context.SecurityContext, packageId string, versionName string) error + PatchVersion(ctx context.SecurityContext, packageId string, versionName string, status *string, versionLabels *[]string) (*view.VersionContent, error) + GetLatestContentDataBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, *view.ContentData, error) + GetLatestDocumentBySlug_deprecated(packageId string, versionName string, slug string) (*view.PublishedDocument_deprecated, error) + GetLatestDocumentBySlug(packageId string, versionName string, slug string) 
(*view.PublishedDocument, error) + GetLatestDocuments(packageId string, versionName string, skipRefs bool, filterReq view.DocumentsFilterReq) (*view.VersionDocuments, error) + GetSharedFile(sharedFileId string) ([]byte, string, error) + SharePublishedFile(packageId string, versionName string, slug string) (*view.SharedUrlResult, error) + GetVersionValidationChanges(packageId string, versionName string) (*view.VersionValidationChanges, error) + GetVersionValidationProblems(packageId string, versionName string) (*view.VersionValidationProblems, error) + GetDefaultVersion(packageId string) (string, error) + GetVersionDetails(packageId string, versionName string) (*view.VersionDetails, error) + GetVersionReferences(packageId string, versionName string, filterReq view.VersionReferencesReq) (*view.VersionReferences, error) //deprecated + GetVersionReferencesV3(packageId string, versionName string) (*view.VersionReferencesV3, error) + SearchForPackages(searchReq view.SearchQueryReq) (*view.SearchResult, error) + SearchForDocuments(searchReq view.SearchQueryReq) (*view.SearchResult, error) + GetVersionStatus(packageId string, version string) (string, error) + GetLatestRevision(packageId string, versionName string) (int, error) + GetVersionChanges(packageId, version, apiType string, severities []string, changelogCalculationParams view.VersionChangesReq) (*view.VersionChangesView, error) + GetVersionRevisionsList_deprecated(packageId, versionName string, filterReq view.PagingFilterReq) (*view.PackageVersionRevisions_deprecated, error) + GetVersionRevisionsList(packageId, versionName string, filterReq view.PagingFilterReq) (*view.PackageVersionRevisions, error) + GetTransformedDocuments_deprecated(packageId, version, apiType, groupName, format string) ([]byte, error) + GetTransformedDocuments(packageId string, version string, apiType string, groupName string, buildType string, format string) ([]byte, error) + DeleteVersionsRecursively(ctx context.SecurityContext, packageId 
string, retention time.Time) (string, error) + CopyVersion(ctx context.SecurityContext, packageId string, version string, req view.CopyVersionReq) (string, error) + GetPublishedVersionsHistory(filter view.PublishedVersionHistoryFilter) ([]view.PublishedVersionHistoryView, error) + StartPublishFromCSV(ctx context.SecurityContext, req view.PublishFromCSVReq) (string, error) + GetCSVDashboardPublishStatus(publishId string) (*view.CSVDashboardPublishStatusResponse, error) + GetCSVDashboardPublishReport(publishId string) ([]byte, error) +} + +func NewVersionService(gitClientProvider GitClientProvider, + repo repository.PrjGrpIntRepository, + favoritesRepo repository.FavoritesRepository, + publishedRepo repository.PublishedRepository, + publishedService PublishedService, + operationRepo repository.OperationRepository, + operationService OperationService, + atService ActivityTrackingService, + systemInfoService SystemInfoService, + packageVersionEnrichmentService PackageVersionEnrichmentService, + portalService PortalService, + versionCleanupRepository repository.VersionCleanupRepository, + operationGroupService OperationGroupService) VersionService { + return &versionServiceImpl{ + gitClientProvider: gitClientProvider, + pRepo: repo, + favoritesRepo: favoritesRepo, + publishedRepo: publishedRepo, + publishedService: publishedService, + operationRepo: operationRepo, + operationService: operationService, + atService: atService, + systemInfoService: systemInfoService, + packageVersionEnrichmentService: packageVersionEnrichmentService, + portalService: portalService, + versionCleanupRepository: versionCleanupRepository, + operationGroupService: operationGroupService, + } +} + +type versionServiceImpl struct { + gitClientProvider GitClientProvider + pRepo repository.PrjGrpIntRepository + favoritesRepo repository.FavoritesRepository + publishedRepo repository.PublishedRepository + publishedService PublishedService + operationRepo repository.OperationRepository + 
operationService OperationService + atService ActivityTrackingService + systemInfoService SystemInfoService + packageVersionEnrichmentService PackageVersionEnrichmentService + portalService PortalService + versionCleanupRepository repository.VersionCleanupRepository + buildService BuildService + operationGroupService OperationGroupService +} + +func (v *versionServiceImpl) SetBuildService(buildService BuildService) { + v.buildService = buildService +} + +func (v versionServiceImpl) SharePublishedFile(packageId string, versionName string, slug string) (*view.SharedUrlResult, error) { + version, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := v.publishedRepo.GetLatestContentBySlug(packageId, version.Version, slug) + if err != nil { + return nil, err + } + + if content == nil { + return nil, &exception.ContentNotFoundError{ContentId: slug} + } + + for attempts := 0; attempts < 100; attempts++ { + sharedIdInfoEntity, err := v.publishedRepo.GetFileSharedInfo(packageId, slug, version.Version) + if err != nil { + return nil, err + } + if sharedIdInfoEntity != nil { + return entity.MakeSharedUrlInfoV2(sharedIdInfoEntity), nil + } + + newSharedUrlInfoEntity := &entity.SharedUrlInfoEntity{ + SharedId: generateSharedId(8), + PackageId: packageId, + Version: version.Version, + FileId: slug, // TODO: Slug! 
+ } + if err := v.publishedRepo.CreateFileSharedInfo(newSharedUrlInfoEntity); err != nil { + if customError, ok := err.(*exception.CustomError); ok { + if customError.Code == exception.GeneratedSharedIdIsNotUnique { + continue + } else { + return nil, err + } + } + } else { + return entity.MakeSharedUrlInfoV2(newSharedUrlInfoEntity), nil + } + } + return nil, fmt.Errorf("failed to generate unique shared id") +} + +func generateSharedId(size int) string { + rndHash := crypto.CreateRandomHash() + return strings.ToLower(rndHash[:size]) +} + +func (v versionServiceImpl) GetSharedFile(sharedFileId string) ([]byte, string, error) { + sharedFileIdInfo, err := v.publishedRepo.GetFileSharedInfoById(sharedFileId) + if err != nil { + return nil, "", err + } + if sharedFileIdInfo == nil { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.SharedFileIdNotFound, + Message: exception.SharedFileIdNotFoundMsg, + Params: map[string]interface{}{"sharedFileId": sharedFileId}, + } + } + version, err := v.publishedRepo.GetVersionIncludingDeleted(sharedFileIdInfo.PackageId, sharedFileIdInfo.Version) + if err != nil { + return nil, "", err + } + if version == nil { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": sharedFileIdInfo.Version}, + } + } + if version.DeletedAt != nil && !version.DeletedAt.IsZero() { + return nil, "", &exception.CustomError{ + Status: http.StatusGone, + Code: exception.SharedContentUnavailable, + Message: exception.SharedContentUnavailableMsg, + Params: map[string]interface{}{"sharedFileId": sharedFileId}, + } + } + + content, err := v.publishedRepo.GetLatestContentBySlug(sharedFileIdInfo.PackageId, sharedFileIdInfo.Version, sharedFileIdInfo.FileId) + if err != nil { + return nil, "", err + } + if content == nil { + return nil, "", &exception.CustomError{ + Status: 
http.StatusNotFound, + Code: exception.NoContentFoundForSharedId, + Message: exception.NoContentFoundForSharedIdMsg, + Params: map[string]interface{}{"sharedFileId": sharedFileId}, + } + } + + pce, err := v.publishedRepo.GetContentData(content.PackageId, content.Checksum) + if err != nil { + return nil, "", err + } + if pce == nil { + return nil, "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoContentFoundForSharedId, + Message: exception.NoContentFoundForSharedIdMsg, + Params: map[string]interface{}{"sharedFileId": sharedFileId}, + } + } + + attachmentFileName := content.FileId + if content.Format == view.JsonFormat { + attachmentFileName = fmt.Sprintf("%s.%s", strings.TrimSuffix(content.FileId, path.Ext(content.FileId)), string(view.JsonExtension)) + } + return pce.Data, attachmentFileName, nil +} + +func (v versionServiceImpl) GetLatestDocumentBySlug_deprecated(packageId string, versionName string, slug string) (*view.PublishedDocument_deprecated, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + document, err := v.publishedRepo.GetLatestContentBySlug(packageId, versionName, slug) + if err != nil { + return nil, err + } + if document == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + operationEnts, err := v.operationRepo.GetOperationsByIds(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, document.OperationIds) + if err != nil { + return nil, err + } + operations := make([]view.DocumentsOperation_deprecated, 0) + for _, 
operationEnt := range operationEnts { + operations = append(operations, entity.MakeDocumentsOperationView_deprecated(operationEnt)) + } + documentView := entity.MakePublishedDocumentView_deprecated(document) + documentView.Operations = operations + return documentView, nil +} + +func (v versionServiceImpl) GetLatestDocumentBySlug(packageId string, versionName string, slug string) (*view.PublishedDocument, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + document, err := v.publishedRepo.GetLatestContentBySlug(packageId, versionName, slug) + if err != nil { + return nil, err + } + if document == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + operationEnts, err := v.operationRepo.GetOperationsByIds(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, document.OperationIds) + if err != nil { + return nil, err + } + operations := make([]interface{}, 0) + for _, operationEnt := range operationEnts { + operations = append(operations, entity.MakeDocumentsOperationView(operationEnt)) + } + documentView := entity.MakePublishedDocumentView(document) + documentView.Operations = operations + return documentView, nil +} + +func (v versionServiceImpl) GetLatestDocuments(packageId string, versionName string, skipRefs bool, filterReq view.DocumentsFilterReq) (*view.VersionDocuments, error) { + version, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: 
http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + searchQuery := entity.PublishedContentSearchQueryEntity{ + TextFilter: filterReq.TextFilter, + Limit: filterReq.Limit, + Offset: filterReq.Offset, + DocumentTypesFilter: view.GetDocumentTypesForApiType(filterReq.ApiType), + } + + versionDocuments := make([]view.PublishedDocumentRefView, 0) + packageVersions := make(map[string][]string, 0) + versionDocumentEnts, err := v.publishedRepo.GetRevisionContentWithLimit(packageId, version.Version, version.Revision, skipRefs, searchQuery) + if err != nil { + return nil, err + } + for _, versionDocumentEnt := range versionDocumentEnts { + tmpEnt := versionDocumentEnt + versionDocuments = append(versionDocuments, *entity.MakePublishedDocumentRefView2(&tmpEnt)) + packageVersions[tmpEnt.PackageId] = append(packageVersions[tmpEnt.PackageId], view.MakeVersionRefKey(tmpEnt.Version, tmpEnt.Revision)) + } + + packagesRefs, err := v.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + if err != nil { + return nil, err + } + return &view.VersionDocuments{Documents: versionDocuments, Packages: packagesRefs}, nil +} + +// deprecated +func (v versionServiceImpl) GetVersionReferences(packageId string, versionName string, filterReq view.VersionReferencesReq) (*view.VersionReferences, error) { + version, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + versionReferences := make([]view.VersionReference, 0) + searchQuery := entity.PackageVersionSearchQueryEntity{ + PackageId: packageId, + Version: version.Version, + Revision: version.Revision, 
+ TextFilter: filterReq.TextFilter, + Kind: filterReq.Kind, + Limit: filterReq.Limit, + Offset: filterReq.Page * filterReq.Limit, + ShowAllDescendants: filterReq.ShowAllDescendants, + } + + publishedReferences, err := v.publishedRepo.GetVersionRefs(searchQuery) + + if err != nil { + return nil, err + } + + for _, ref := range publishedReferences { + parents, err := v.getParents(ref.PackageId) + if err != nil { + return nil, err + } + versionReferences = append(versionReferences, view.VersionReference{ + RefId: ref.PackageId, + Name: ref.PackageName, + Version: ref.Version, + Revision: ref.Revision, + Status: ref.VersionStatus, + Kind: ref.Kind, + DeletedAt: ref.DeletedAt, + DeletedBy: ref.DeletedBy, + Parents: parents, + }) + } + return &view.VersionReferences{References: versionReferences}, nil +} + +func (v versionServiceImpl) GetVersionReferencesV3(packageId string, versionName string) (*view.VersionReferencesV3, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + versionReferences := make([]view.VersionReferenceV3, 0) + + publishedReferencesEnts, err := v.publishedRepo.GetVersionRefsV3(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + packageVersions := make(map[string][]string, 0) + for _, refEntity := range publishedReferencesEnts { + versionReferences = append(versionReferences, entity.MakePublishedReferenceView(refEntity)) + packageVersions[refEntity.RefPackageId] = append(packageVersions[refEntity.RefPackageId], view.MakeVersionRefKey(refEntity.RefVersion, refEntity.RefRevision)) + } + packagesRefs, err := v.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + 
if err != nil { + return nil, err + } + return &view.VersionReferencesV3{References: versionReferences, Packages: packagesRefs}, nil +} + +func (v versionServiceImpl) getParents(packageId string) ([]view.ParentPackageInfo, error) { + parents, err := v.publishedRepo.GetParentsForPackage(packageId) + if err != nil { + return nil, err + } + var result []view.ParentPackageInfo + for _, grp := range parents { + result = append(result, *entity.MakePackageParentView(&grp)) + } + return result, err +} +func (v versionServiceImpl) GetLatestContentDataBySlug(packageId string, versionName string, slug string) (*view.PublishedContent, *view.ContentData, error) { + ent, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, nil, err + } + if ent == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + + content, err := v.publishedRepo.GetRevisionContentBySlug(packageId, ent.Version, slug, ent.Revision) + if err != nil { + return nil, nil, err + } + if content == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + + pce, err := v.publishedRepo.GetContentData(packageId, content.Checksum) + if err != nil { + return nil, nil, err + } + if pce == nil { + return nil, nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ContentSlugNotFound, + Message: exception.ContentSlugNotFoundMsg, + Params: map[string]interface{}{"contentSlug": slug}, + } + } + return entity.MakePublishedContentView(content), entity.MakeContentDataViewPub(content, pce), nil +} + +func (v versionServiceImpl) DeleteVersion(ctx context.SecurityContext, packageId string, versionName string) error { + 
version, revision, err := repository.SplitVersionRevision(versionName) + if err != nil { + return err + } + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + if revision != 0 && revision != versionEnt.Revision { + return &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.UnableToDeleteOldRevision, + Message: exception.UnableToDeleteOldRevisionMsg, + } + } + err = v.publishedService.DeleteVersion(ctx, packageId, versionEnt.Version) + if err != nil { + return err + } + dataMap := map[string]interface{}{} + dataMap["version"] = versionEnt.Version + dataMap["revision"] = versionEnt.Revision + dataMap["status"] = versionEnt.Status + + v.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETDeleteVersion, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + return nil +} + +func (v versionServiceImpl) PatchVersion(ctx context.SecurityContext, packageId string, versionName string, status *string, versionLabels *[]string) (*view.VersionContent, error) { + version, revision, err := repository.SplitVersionRevision(versionName) + if err != nil { + return nil, err + } + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName, "packageId": packageId}, + } + } + if revision != 0 && revision != versionEnt.Revision { + return nil, &exception.CustomError{ + Status: 
http.StatusNotFound, + Code: exception.UnableToChangeOldRevision, + Message: exception.UnableToChangeOldRevisionMsg, + } + } + dataMap := map[string]interface{}{} + versionMeta := make([]string, 0) + + if status != nil { + newStatus := *status + if newStatus == string(view.Release) { + packEnt, err := v.publishedRepo.GetPackage(packageId) + if err != nil { + return nil, err + } + var pattern string + if packEnt.ReleaseVersionPattern != "" { + pattern = packEnt.ReleaseVersionPattern + } else { + pattern = ".*" + } + err = ReleaseVersionMatchesPattern(versionEnt.Version, pattern) + if err != nil { + return nil, err + } + } + + dataMap["oldStatus"] = versionEnt.Status + dataMap["newStatus"] = newStatus + versionMeta = append(versionMeta, "status") + } + + if versionLabels != nil { + dataMap["oldVersionLabels"] = versionEnt.Labels + dataMap["newVersionLabels"] = versionLabels + versionMeta = append(versionMeta, "versionLabels") + } + + _, err = v.publishedRepo.PatchVersion(packageId, versionEnt.Version, status, versionLabels) + if err != nil { + return nil, err + } + + result, err := v.GetPackageVersionContent(packageId, versionEnt.Version, true, false, false) + if err != nil { + return nil, err + } + dataMap["version"] = versionEnt.Version + dataMap["revision"] = versionEnt.Revision + dataMap["versionMeta"] = versionMeta + v.atService.TrackEvent(view.ActivityTrackingEvent{ + Type: view.ATETPatchVersionMeta, + Data: dataMap, + PackageId: packageId, + Date: time.Now(), + UserId: ctx.GetUserId(), + }) + return result, nil +} + +func (v versionServiceImpl) GetPackageVersionsView_deprecated(req view.VersionListReq) (*view.PublishedVersionsView_deprecated_v2, error) { + packageEnt, err := v.publishedRepo.GetPackage(req.PackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: 
map[string]interface{}{"packageId": req.PackageId}, + } + } + versions := make([]view.PublishedVersionListView_deprecated_v2, 0) + versionSortByPG := entity.GetVersionSortByPG(req.SortBy) + if versionSortByPG == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "sortBy", "value": req.SortBy}, + } + } + versionSortOrderPG := entity.GetVersionSortOrderPG(req.SortOrder) + if versionSortOrderPG == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "sortOrder", "value": req.SortOrder}, + } + } + + searchQueryReq := entity.PublishedVersionSearchQueryEntity{ + PackageId: req.PackageId, + Status: req.Status, + Label: req.Label, + TextFilter: req.TextFilter, + SortBy: versionSortByPG, + SortOrder: versionSortOrderPG, + Limit: req.Limit, + Offset: req.Page * req.Limit, + } + ents, err := v.publishedRepo.GetReadonlyPackageVersionsWithLimit_deprecated(searchQueryReq, req.CheckRevisions) + if err != nil { + return nil, err + } + for _, ent := range ents { + version := entity.MakeReadonlyPublishedVersionListView2_deprecated(&ent) + versions = append(versions, *version) + } + return &view.PublishedVersionsView_deprecated_v2{Versions: versions}, nil +} + +func (v versionServiceImpl) GetPackageVersionsView(req view.VersionListReq) (*view.PublishedVersionsView, error) { + packageEnt, err := v.publishedRepo.GetPackage(req.PackageId) + if err != nil { + return nil, err + } + if packageEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": req.PackageId}, + } + } + versions := make([]view.PublishedVersionListView, 0) + versionSortByPG 
:= entity.GetVersionSortByPG(req.SortBy) + if versionSortByPG == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "sortBy", "value": req.SortBy}, + } + } + versionSortOrderPG := entity.GetVersionSortOrderPG(req.SortOrder) + if versionSortOrderPG == "" { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "sortOrder", "value": req.SortOrder}, + } + } + + searchQueryReq := entity.PublishedVersionSearchQueryEntity{ + PackageId: req.PackageId, + Status: req.Status, + Label: req.Label, + TextFilter: req.TextFilter, + SortBy: versionSortByPG, + SortOrder: versionSortOrderPG, + Limit: req.Limit, + Offset: req.Page * req.Limit, + } + ents, err := v.publishedRepo.GetReadonlyPackageVersionsWithLimit(searchQueryReq, req.CheckRevisions) + if err != nil { + return nil, err + } + for _, ent := range ents { + version := entity.MakeReadonlyPublishedVersionListView2(&ent) + versions = append(versions, *version) + } + return &view.PublishedVersionsView{Versions: versions}, nil +} +func (v versionServiceImpl) GetPackageVersionContent_deprecated(packageId string, version string, includeSummary bool, includeOperations bool, includeGroups bool) (*view.VersionContent_deprecated, error) { + versionEnt, err := v.publishedRepo.GetReadonlyVersion_deprecated(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + + latestRevision, err := v.publishedRepo.GetLatestRevision(versionEnt.PackageId, 
versionEnt.Version) + if err != nil { + return nil, err + } + if latestRevision == 0 { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + versionContent := &view.VersionContent_deprecated{ + PublishedAt: versionEnt.PublishedAt, + PublishedBy: versionEnt.UserName, + PreviousVersion: view.MakeVersionRefKey(versionEnt.PreviousVersion, versionEnt.PreviousVersionRevision), + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + VersionLabels: versionEnt.Labels, + Status: versionEnt.Status, + NotLatestRevision: versionEnt.Revision != latestRevision, + PackageId: versionEnt.PackageId, + Version: view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision), + RevisionsCount: latestRevision, + } + + versionOperationTypes, err := v.getVersionOperationTypes_deprecated(versionEnt, includeSummary, includeOperations) + if err != nil { + return nil, err + } + if includeGroups { + versionContent.OperationGroups, err = v.getVersionOperationGroups_deprecated(versionEnt) + if err != nil { + return nil, err + } + } + + versionContent.OperationTypes = versionOperationTypes + + return versionContent, nil +} +func (v versionServiceImpl) GetPackageVersionContent(packageId string, version string, includeSummary bool, includeOperations bool, includeGroups bool) (*view.VersionContent, error) { + versionEnt, err := v.publishedRepo.GetReadonlyVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + + latestRevision, err := 
v.publishedRepo.GetLatestRevision(versionEnt.PackageId, versionEnt.Version) + if err != nil { + return nil, err + } + if latestRevision == 0 { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + versionContent := &view.VersionContent{ + PublishedAt: versionEnt.PublishedAt, + PublishedBy: *entity.MakePrincipalView(&versionEnt.PrincipalEntity), + PreviousVersion: view.MakeVersionRefKey(versionEnt.PreviousVersion, versionEnt.PreviousVersionRevision), + PreviousVersionPackageId: versionEnt.PreviousVersionPackageId, + VersionLabels: versionEnt.Labels, + Status: versionEnt.Status, + NotLatestRevision: versionEnt.Revision != latestRevision, + PackageId: versionEnt.PackageId, + Version: view.MakeVersionRefKey(versionEnt.Version, versionEnt.Revision), + RevisionsCount: latestRevision, + } + + versionOperationTypes, err := v.getVersionOperationTypes(versionEnt, includeSummary, includeOperations) + if err != nil { + return nil, err + } + if includeGroups { + versionContent.OperationGroups, err = v.getVersionOperationGroups(versionEnt) + if err != nil { + return nil, err + } + } + + versionContent.OperationTypes = versionOperationTypes + + return versionContent, nil +} + +func (v versionServiceImpl) getVersionOperationTypes_deprecated(versionEnt *entity.ReadonlyPublishedVersionEntity_deprecated, includeSummary bool, includeOperations bool) ([]view.VersionOperationType, error) { + if !includeSummary && !includeOperations { + return nil, nil + } + versionSummaryMap := make(map[string]*view.VersionOperationType, 0) + if includeSummary { + operationsCountEnts, err := v.operationRepo.GetOperationsTypeCount(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + for _, opCount := range operationsCountEnts { + apiType, _ := 
view.ParseApiType(opCount.ApiType) + if apiType == "" { + continue + } + operationCount := opCount.OperationsCount + deprecatedCount := opCount.DeprecatedCount + noBwcOperationsCount := opCount.NoBwcOperationsCount + if versionApiTypeSummary, exists := versionSummaryMap[opCount.ApiType]; exists { + versionApiTypeSummary.OperationsCount = &operationCount + versionApiTypeSummary.DeprecatedCount = &deprecatedCount + versionApiTypeSummary.NoBwcOperationsCount = &noBwcOperationsCount + } else { + versionSummaryMap[opCount.ApiType] = &view.VersionOperationType{ + ApiType: opCount.ApiType, + OperationsCount: &operationCount, + DeprecatedCount: &deprecatedCount, + NoBwcOperationsCount: &noBwcOperationsCount, + } + } + } + if versionEnt.PreviousVersion != "" { + previousPackageId := versionEnt.PreviousVersionPackageId + if previousPackageId == "" { + previousPackageId = versionEnt.PackageId + } + previousVersionEnt, err := v.publishedRepo.GetVersion(previousPackageId, versionEnt.PreviousVersion) + if err != nil { + return nil, err + } + if previousVersionEnt != nil { + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision) + versionComparison, err := v.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return nil, err + } + if versionComparison != nil { + for _, ot := range versionComparison.OperationTypes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + changeSummary := ot.ChangesSummary + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + versionApiTypeSummary.ChangesSummary = &changeSummary + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + ChangesSummary: &changeSummary, + } + } + } + if len(versionComparison.Refs) > 0 { + refsComparisons, err := v.publishedRepo.GetVersionRefsComparisons(comparisonId) 
+ if err != nil { + return nil, err + } + for _, comparison := range refsComparisons { + for _, ot := range comparison.OperationTypes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + changeSummary := ot.ChangesSummary + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + if versionApiTypeSummary.ChangesSummary != nil { + versionApiTypeSummary.ChangesSummary.Breaking += changeSummary.Breaking + versionApiTypeSummary.ChangesSummary.SemiBreaking += changeSummary.SemiBreaking + versionApiTypeSummary.ChangesSummary.Deprecated += changeSummary.Deprecated + versionApiTypeSummary.ChangesSummary.NonBreaking += changeSummary.NonBreaking + versionApiTypeSummary.ChangesSummary.Annotation += changeSummary.Annotation + versionApiTypeSummary.ChangesSummary.Unclassified += changeSummary.Unclassified + } else { + versionApiTypeSummary.ChangesSummary = &changeSummary + } + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + ChangesSummary: &changeSummary, + } + } + } + } + } + + } + } + } + } + if includeOperations { + operationTypeHashes, err := v.operationRepo.GetOperationsTypeDataHashes(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + for _, ot := range operationTypeHashes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + versionApiTypeSummary.Operations = ot.OperationsHash + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + Operations: ot.OperationsHash, + } + } + } + } + versionOperationTypes := make([]view.VersionOperationType, 0) + for _, v := range versionSummaryMap { + versionOperationTypes = append(versionOperationTypes, *v) + } + return versionOperationTypes, nil +} + +func (v versionServiceImpl) getVersionOperationTypes(versionEnt 
*entity.PackageVersionRevisionEntity, includeSummary bool, includeOperations bool) ([]view.VersionOperationType, error) { + if !includeSummary && !includeOperations { + return nil, nil + } + zeroInt := 0 + versionSummaryMap := make(map[string]*view.VersionOperationType, 0) + if includeSummary { + operationsCountEnts, err := v.operationRepo.GetOperationsTypeCount(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + for _, opCount := range operationsCountEnts { + apiType, _ := view.ParseApiType(opCount.ApiType) + if apiType == "" { + continue + } + operationCount := opCount.OperationsCount + deprecatedCount := opCount.DeprecatedCount + noBwcOperationsCount := opCount.NoBwcOperationsCount + internalAudienceOperationsCount := opCount.InternalAudienceOperationsCount + unknownAudienceOperationsCount := opCount.UnknownAudienceOperationsCount + if versionApiTypeSummary, exists := versionSummaryMap[opCount.ApiType]; exists { + versionApiTypeSummary.OperationsCount = &operationCount + versionApiTypeSummary.DeprecatedCount = &deprecatedCount + versionApiTypeSummary.NoBwcOperationsCount = &noBwcOperationsCount + versionApiTypeSummary.InternalAudienceOperationsCount = &internalAudienceOperationsCount + versionApiTypeSummary.UnknownAudienceOperationsCount = &unknownAudienceOperationsCount + + } else { + versionSummaryMap[opCount.ApiType] = &view.VersionOperationType{ + ApiType: opCount.ApiType, + OperationsCount: &operationCount, + DeprecatedCount: &deprecatedCount, + NoBwcOperationsCount: &noBwcOperationsCount, + InternalAudienceOperationsCount: &internalAudienceOperationsCount, + UnknownAudienceOperationsCount: &unknownAudienceOperationsCount, + } + } + } + if versionEnt.PreviousVersion != "" { + previousPackageId := versionEnt.PreviousVersionPackageId + if previousPackageId == "" { + previousPackageId = versionEnt.PackageId + } + previousVersionEnt, err := v.publishedRepo.GetVersion(previousPackageId, 
versionEnt.PreviousVersion) + if err != nil { + return nil, err + } + if previousVersionEnt != nil { + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision) + versionComparison, err := v.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return nil, err + } + if versionComparison != nil { + for _, ot := range versionComparison.OperationTypes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + changeSummary := ot.ChangesSummary + numberOfImpactedOperations := ot.NumberOfImpactedOperations + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + versionApiTypeSummary.ChangesSummary = &changeSummary + versionApiTypeSummary.NumberOfImpactedOperations = &numberOfImpactedOperations + versionApiTypeSummary.ApiAudienceTransitions = ot.ApiAudienceTransitions + + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + ChangesSummary: &changeSummary, + NumberOfImpactedOperations: &numberOfImpactedOperations, + ApiAudienceTransitions: ot.ApiAudienceTransitions, + //in this case version doesn't have any operations of ot.ApiType type, but there are some changes + //but we still need to fill count fields with zero value because its a pointer + OperationsCount: &zeroInt, + DeprecatedCount: &zeroInt, + NoBwcOperationsCount: &zeroInt, + InternalAudienceOperationsCount: &zeroInt, + UnknownAudienceOperationsCount: &zeroInt, + } + } + } + if len(versionComparison.Refs) > 0 { + refsComparisons, err := v.publishedRepo.GetVersionRefsComparisons(comparisonId) + if err != nil { + return nil, err + } + for _, comparison := range refsComparisons { + for _, ot := range comparison.OperationTypes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + changeSummary := ot.ChangesSummary + 
numberOfImpactedOperations := ot.NumberOfImpactedOperations + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + if versionApiTypeSummary.ChangesSummary != nil { + versionApiTypeSummary.ChangesSummary.Breaking += changeSummary.Breaking + versionApiTypeSummary.ChangesSummary.SemiBreaking += changeSummary.SemiBreaking + versionApiTypeSummary.ChangesSummary.Deprecated += changeSummary.Deprecated + versionApiTypeSummary.ChangesSummary.NonBreaking += changeSummary.NonBreaking + versionApiTypeSummary.ChangesSummary.Annotation += changeSummary.Annotation + versionApiTypeSummary.ChangesSummary.Unclassified += changeSummary.Unclassified + } else { + versionApiTypeSummary.ChangesSummary = &changeSummary + } + if versionApiTypeSummary.NumberOfImpactedOperations != nil { + versionApiTypeSummary.NumberOfImpactedOperations.Breaking += numberOfImpactedOperations.Breaking + versionApiTypeSummary.NumberOfImpactedOperations.SemiBreaking += numberOfImpactedOperations.SemiBreaking + versionApiTypeSummary.NumberOfImpactedOperations.Deprecated += numberOfImpactedOperations.Deprecated + versionApiTypeSummary.NumberOfImpactedOperations.NonBreaking += numberOfImpactedOperations.NonBreaking + versionApiTypeSummary.NumberOfImpactedOperations.Annotation += numberOfImpactedOperations.Annotation + versionApiTypeSummary.NumberOfImpactedOperations.Unclassified += numberOfImpactedOperations.Unclassified + } else { + versionApiTypeSummary.NumberOfImpactedOperations = &numberOfImpactedOperations + } + if len(ot.ApiAudienceTransitions) > 0 { + if len(versionApiTypeSummary.ApiAudienceTransitions) > 0 { + //merge ApiAudienceTransitions for all referenced packages by unique Current and Previous audience fields + transitions := make([]view.ApiAudienceTransition, 0) + for _, audienceTransition := range versionApiTypeSummary.ApiAudienceTransitions { + audienceTransition := audienceTransition + for _, otAudienceTransition := range ot.ApiAudienceTransitions { + if 
audienceTransition.CurrentAudience == otAudienceTransition.CurrentAudience && + audienceTransition.PreviousAudience == otAudienceTransition.PreviousAudience { + audienceTransition.OperationsCount += otAudienceTransition.OperationsCount + break + } + } + transitions = append(transitions, audienceTransition) + } + for _, otAudienceTransition := range ot.ApiAudienceTransitions { + otAudienceTransition := otAudienceTransition + exists := false + for _, audienceTransition := range transitions { + if audienceTransition.CurrentAudience == otAudienceTransition.CurrentAudience && + audienceTransition.PreviousAudience == otAudienceTransition.PreviousAudience { + exists = true + break + } + } + if !exists { + transitions = append(transitions, otAudienceTransition) + } + } + versionApiTypeSummary.ApiAudienceTransitions = transitions + } else { + versionApiTypeSummary.ApiAudienceTransitions = ot.ApiAudienceTransitions + } + + } + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + ChangesSummary: &changeSummary, + NumberOfImpactedOperations: &numberOfImpactedOperations, + ApiAudienceTransitions: ot.ApiAudienceTransitions, + //in this case version doesn't have any operations of ot.ApiType type, but there are some changes + //but we still need to fill count fields with zero value because its a pointer + OperationsCount: &zeroInt, + DeprecatedCount: &zeroInt, + NoBwcOperationsCount: &zeroInt, + InternalAudienceOperationsCount: &zeroInt, + UnknownAudienceOperationsCount: &zeroInt, + } + } + } + } + } + + } + } + } + } + if includeOperations { + operationTypeHashes, err := v.operationRepo.GetOperationsTypeDataHashes(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + for _, ot := range operationTypeHashes { + apiType, _ := view.ParseApiType(ot.ApiType) + if apiType == "" { + continue + } + if versionApiTypeSummary, exists := versionSummaryMap[ot.ApiType]; exists { + 
versionApiTypeSummary.Operations = ot.OperationsHash + } else { + versionSummaryMap[ot.ApiType] = &view.VersionOperationType{ + ApiType: ot.ApiType, + Operations: ot.OperationsHash, + } + } + } + } + versionOperationTypes := make([]view.VersionOperationType, 0) + for _, v := range versionSummaryMap { + versionOperationTypes = append(versionOperationTypes, *v) + } + return versionOperationTypes, nil +} + +func (v versionServiceImpl) getVersionOperationGroups_deprecated(versionEnt *entity.ReadonlyPublishedVersionEntity_deprecated) ([]view.VersionOperationGroup, error) { + operationGroupEntities, err := v.operationRepo.GetVersionOperationGroups(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + versionOperationGroups := make([]view.VersionOperationGroup, 0) + for _, operationGroupEnt := range operationGroupEntities { + versionOperationGroups = append(versionOperationGroups, entity.MakeVersionOperationGroupView(operationGroupEnt)) + } + return versionOperationGroups, nil +} + +func (v versionServiceImpl) getVersionOperationGroups(versionEnt *entity.PackageVersionRevisionEntity) ([]view.VersionOperationGroup, error) { + operationGroupEntities, err := v.operationRepo.GetVersionOperationGroups(versionEnt.PackageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + versionOperationGroups := make([]view.VersionOperationGroup, 0) + for _, operationGroupEnt := range operationGroupEntities { + versionOperationGroups = append(versionOperationGroups, entity.MakeVersionOperationGroupView(operationGroupEnt)) + } + return versionOperationGroups, nil +} + +func (v versionServiceImpl) getVersionChangeSummary(packageId string, versionName string, revision int) (*view.ChangeSummary, error) { + versionEnt, err := v.publishedRepo.GetVersionByRevision(packageId, versionName, revision) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, nil + } + previousPackageId := 
versionEnt.PreviousVersionPackageId + if previousPackageId == "" { + previousPackageId = versionEnt.PackageId + } + if versionEnt.PreviousVersion == "" { + return nil, nil + } + previousVersionEnt, err := v.publishedRepo.GetVersion(previousPackageId, versionEnt.PreviousVersion) + if err != nil { + return nil, err + } + if previousVersionEnt == nil { + return nil, nil + } + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision, + ) + versionComparison, err := v.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return nil, err + } + if versionComparison == nil { + return nil, nil + } + changeSummary := &view.ChangeSummary{} + versionOperationTypes := make([]view.OperationType, 0) + + if len(versionComparison.Refs) > 0 { + versionComparisons, err := v.publishedRepo.GetVersionRefsComparisons(comparisonId) + if err != nil { + return nil, err + } + for _, comparison := range versionComparisons { + versionOperationTypes = append(versionOperationTypes, comparison.OperationTypes...) + } + } else { + versionOperationTypes = append(versionOperationTypes, versionComparison.OperationTypes...) 
+ } + + for _, opType := range versionOperationTypes { + changeSummary.Breaking += opType.ChangesSummary.Breaking + changeSummary.SemiBreaking += opType.ChangesSummary.SemiBreaking + changeSummary.Deprecated += opType.ChangesSummary.Deprecated + changeSummary.NonBreaking += opType.ChangesSummary.NonBreaking + changeSummary.Annotation += opType.ChangesSummary.Annotation + changeSummary.Unclassified += opType.ChangesSummary.Unclassified + } + return changeSummary, nil +} + +func (p versionServiceImpl) GetVersionValidationChanges(packageId string, versionName string) (*view.VersionValidationChanges, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName, "packageId": packageId}, + } + } + versionChanges, err := p.publishedRepo.GetVersionValidationChanges(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + changelog := make([]view.VersionChangelogData, 0) + bwc := make([]view.VersionBwcData, 0) + if versionChanges != nil { + if versionChanges.Changelog != nil && len(versionChanges.Changelog.Data) != 0 { + changelog = versionChanges.Changelog.Data + } + if versionChanges.Bwc != nil && len(versionChanges.Bwc.Data) != 0 { + bwc = versionChanges.Bwc.Data + } + } + return &view.VersionValidationChanges{ + PreviousVersion: version.PreviousVersion, + PreviousVersionPackageId: version.PreviousVersionPackageId, + Changes: changelog, + Bwc: bwc, + }, nil +} + +func (p versionServiceImpl) GetVersionValidationProblems(packageId string, versionName string) (*view.VersionValidationProblems, error) { + version, err := p.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if version == nil { + 
return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName, "packageId": packageId}, + } + } + versionProblems, err := p.publishedRepo.GetVersionValidationProblems(packageId, version.Version, version.Revision) + if err != nil { + return nil, err + } + spectral := make([]view.VersionSpectralData, 0) + if versionProblems != nil { + if len(versionProblems.Spectral.Data) != 0 { + spectral = versionProblems.Spectral.Data + } + } + return &view.VersionValidationProblems{ + Spectral: spectral, + }, nil +} + +func (v versionServiceImpl) GetDefaultVersion(packageId string) (string, error) { + defaultVersion, err := v.publishedRepo.GetDefaultVersion(packageId, string(view.Release)) + if err != nil { + return "", err + } + if defaultVersion == nil { + defaultVersion, err = v.publishedRepo.GetDefaultVersion(packageId, string(view.Draft)) + if err != nil { + return "", err + } + } + if defaultVersion == nil { + return "", nil + } + return defaultVersion.Version, nil +} + +func (v versionServiceImpl) GetLatestRevision(packageId string, versionName string) (int, error) { + version, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return 0, err + } + if version == nil { + return 0, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName, "packageId": packageId}, + } + } + return version.Revision, nil +} + +func (v versionServiceImpl) GetVersionDetails(packageId string, versionName string) (*view.VersionDetails, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + 
Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + changeSummary, err := v.getVersionChangeSummary(packageId, versionEnt.Version, versionEnt.Revision) + if err != nil { + return nil, err + } + + latestRevision, err := v.GetLatestRevision(packageId, versionName) + if err != nil { + return nil, err + } + + versionDetails := view.VersionDetails{ + Version: view.MakeVersionRefKey(versionEnt.Version, latestRevision), + Summary: changeSummary, + } + return &versionDetails, nil +} + +func ReleaseVersionMatchesPattern(versionName string, pattern string) error { + versionNameRegexp := regexp.MustCompile(pattern) + if !versionNameRegexp.MatchString(versionName) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ReleaseVersionDoesntMatchPattern, + Message: exception.ReleaseVersionDoesntMatchPatternMsg, + Params: map[string]interface{}{"version": versionName, "pattern": pattern}, + } + } + return nil +} + +func (v versionServiceImpl) SearchForPackages(searchReq view.SearchQueryReq) (*view.SearchResult, error) { + searchQuery, err := entity.MakePackageSearchQueryEntity(&searchReq) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidSearchParameters, + Message: exception.InvalidSearchParametersMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + //todo maybe move to envs + searchQuery.PackageSearchWeight = entity.PackageSearchWeight{ + PackageNameWeight: 5, + PackageDescriptionWeight: 1, + PackageIdWeight: 1, + PackageServiceNameWeight: 3, + VersionWeight: 5, + VersionLabelWeight: 3, + DefaultVersionWeight: 5, + OpenCountWeight: 0.2, + } + searchQuery.VersionStatusSearchWeight = entity.VersionStatusSearchWeight{ + VersionReleaseStatus: string(view.Release), + VersionReleaseStatusWeight: 4, + VersionDraftStatus: string(view.Draft), + VersionDraftStatusWeight: 
0.6, + VersionArchivedStatus: string(view.Archived), + VersionArchivedStatusWeight: 0.1, + } + versionEntities, err := v.publishedRepo.SearchForVersions(searchQuery) + if err != nil { + return nil, err + } + packages := make([]view.PackageSearchResult, 0) + for _, ent := range versionEntities { + packages = append(packages, *entity.MakePackageSearchResultView(ent)) + } + + return &view.SearchResult{Packages: &packages}, nil +} + +func (v versionServiceImpl) SearchForDocuments(searchReq view.SearchQueryReq) (*view.SearchResult, error) { + unknownTypes := make(map[string]bool, 0) + unknownTypes[string(view.Unknown)] = true + + unknownTypesList := make([]string, 0) + for unknownType := range unknownTypes { + unknownTypesList = append(unknownTypesList, unknownType) + } + + searchQuery, err := entity.MakeDocumentSearchQueryEntity(&searchReq, unknownTypesList) + if err != nil { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidSearchParameters, + Message: exception.InvalidSearchParametersMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + //todo maybe move to envs + searchQuery.DocumentSearchWeight = entity.DocumentSearchWeight{ + TitleWeight: 5, + LabelsWeight: 3, + ContentWeight: 1, + OpenCountWeight: 0.2, + } + searchQuery.VersionStatusSearchWeight = entity.VersionStatusSearchWeight{ + VersionReleaseStatus: string(view.Release), + VersionReleaseStatusWeight: 4, + VersionDraftStatus: string(view.Draft), + VersionDraftStatusWeight: 0.6, + VersionArchivedStatus: string(view.Archived), + VersionArchivedStatusWeight: 0.1, + } + documentEntities, err := v.publishedRepo.SearchForDocuments(searchQuery) + if err != nil { + return nil, err + } + documents := make([]view.DocumentSearchResult, 0) + maxContentLength := 70 //maybe move to envs or input params? 
+ for _, ent := range documentEntities { + var contentSlice string + if unknownTypes[ent.Type] { + contentSlice = "Unsupported content" + } else { + contentSlice = stripContentByFilter(searchReq.SearchString, ent.Metadata.GetDescription(), maxContentLength) + } + documents = append(documents, *entity.MakeDocumentSearchResultView(ent, contentSlice)) + } + + return &view.SearchResult{Documents: &documents}, nil +} + +func stripContentByFilter(filter string, content string, maxLen int) string { + contentLength := len(content) + filterLength := len(filter) + if maxLen < filterLength { + maxLen = filterLength + } + if maxLen >= contentLength { + return content + } + index := strings.Index(strings.ToLower(content), strings.ToLower(filter)) + if index == -1 { + return content[:maxLen] + } + contentOffset := (maxLen - filterLength) / 2 + startPos := index - contentOffset + endPos := index + filterLength + contentOffset + if startPos > 0 && + endPos < contentLength { + return content[startPos:endPos] + } + if startPos == 0 || endPos == contentLength { + return content[startPos:endPos] + } + if startPos < 0 { + endPos = endPos + (0 - startPos) + startPos = 0 + return content[startPos:endPos] + } + if endPos > contentLength { + startPos = startPos - (endPos - contentLength) + endPos = contentLength + return content[startPos:endPos] + } + return content[startPos:endPos] +} + +func ValidateVersionName(versionName string) error { + if strings.Contains(versionName, "@") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.VersionNameNotAllowed, + Message: exception.VersionNameNotAllowedMsg, + Params: map[string]interface{}{"version": versionName, "character": "@"}, + } + } + return nil +} + +func (v versionServiceImpl) GetVersionStatus(packageId string, version string) (string, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return "", err + } + if versionEnt == nil { + return "", 
&exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + + return versionEnt.Status, nil +} + +func (v versionServiceImpl) GetVersionChanges(packageId, version, apiType string, severities []string, versionChangesReq view.VersionChangesReq) (*view.VersionChangesView, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": version, "packageId": packageId}, + } + } + + if versionChangesReq.PreviousVersion == "" || versionChangesReq.PreviousVersionPackageId == "" { + if versionEnt.PreviousVersion == "" { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.NoPreviousVersion, + Message: exception.NoPreviousVersionMsg, + Params: map[string]interface{}{"version": version}, + } + } + versionChangesReq.PreviousVersion = versionEnt.PreviousVersion + if versionEnt.PreviousVersionPackageId != "" { + versionChangesReq.PreviousVersionPackageId = versionEnt.PreviousVersionPackageId + } else { + versionChangesReq.PreviousVersionPackageId = packageId + } + } + previousVersionEnt, err := v.publishedRepo.GetVersion(versionChangesReq.PreviousVersionPackageId, versionChangesReq.PreviousVersion) + if err != nil { + return nil, err + } + if previousVersionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionChangesReq.PreviousVersion, "packageId": 
versionChangesReq.PreviousVersionPackageId}, + } + } + + comparisonId := view.MakeVersionComparisonId( + versionEnt.PackageId, versionEnt.Version, versionEnt.Revision, + previousVersionEnt.PackageId, previousVersionEnt.Version, previousVersionEnt.Revision, + ) + + versionComparison, err := v.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return nil, err + } + if versionComparison == nil || versionComparison.NoContent { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.ComparisonNotFound, + Message: exception.ComparisonNotFoundMsg, + Params: map[string]interface{}{ + "comparisonId": comparisonId, + "packageId": versionEnt.PackageId, + "version": versionEnt.Version, + "revision": versionEnt.Revision, + "previousPackageId": previousVersionEnt.PackageId, + "previousVersion": previousVersionEnt.Version, + "previousRevision": previousVersionEnt.Revision, + }, + } + } + searchQuery := entity.ChangelogSearchQueryEntity{ + ComparisonId: comparisonId, + ApiType: apiType, + ApiKind: versionChangesReq.ApiKind, + ApiAudience: versionChangesReq.ApiAudience, + TextFilter: versionChangesReq.TextFilter, + Tags: versionChangesReq.Tags, + EmptyTag: versionChangesReq.EmptyTag, + RefPackageId: versionChangesReq.RefPackageId, + EmptyGroup: versionChangesReq.EmptyGroup, + Group: versionChangesReq.Group, + GroupPackageId: versionEnt.PackageId, + GroupVersion: versionEnt.Version, + GroupRevision: versionEnt.Revision, + Severities: severities, + } + operationComparisons := make([]interface{}, 0) + changelogOperationEnts, err := v.operationRepo.GetChangelog_deprecated(searchQuery) + if err != nil { + return nil, err + } + + packageVersions := make(map[string][]string) + for _, changelogOperationEnt := range changelogOperationEnts { + operationComparisons = append(operationComparisons, entity.MakeOperationComparisonChangesView(changelogOperationEnt)) + if packageRefKey := view.MakePackageRefKey(changelogOperationEnt.PackageId, 
changelogOperationEnt.Version, changelogOperationEnt.Revision); packageRefKey != "" { + packageVersions[changelogOperationEnt.PackageId] = append(packageVersions[changelogOperationEnt.PackageId], view.MakeVersionRefKey(changelogOperationEnt.Version, changelogOperationEnt.Revision)) + } + if previousPackageRefKey := view.MakePackageRefKey(changelogOperationEnt.PreviousPackageId, changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision); previousPackageRefKey != "" { + packageVersions[changelogOperationEnt.PreviousPackageId] = append(packageVersions[changelogOperationEnt.PreviousPackageId], view.MakeVersionRefKey(changelogOperationEnt.PreviousVersion, changelogOperationEnt.PreviousRevision)) + } + } + packagesRefs, err := v.packageVersionEnrichmentService.GetPackageVersionRefsMap(packageVersions) + if err != nil { + return nil, err + } + versionChanges := &view.VersionChangesView{ + PreviousVersion: previousVersionEnt.Version, + PreviousVersionPackageId: previousVersionEnt.PackageId, + Operations: operationComparisons, + Packages: packagesRefs, + } + return versionChanges, nil +} + +func (v versionServiceImpl) GetVersionRevisionsList_deprecated(packageId, versionName string, filterReq view.PagingFilterReq) (*view.PackageVersionRevisions_deprecated, error) { + ent, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if ent == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + searchQueryReq := entity.PackageVersionSearchQueryEntity{ + PackageId: packageId, + Version: ent.Version, + TextFilter: filterReq.TextFilter, + Limit: filterReq.Limit, + Offset: filterReq.Offset, + } + versionRevisionsEnts, err := v.publishedRepo.GetVersionRevisionsList_deprecated(searchQueryReq) + if err != nil { + return nil, err + } + 
revisions := make([]view.PackageVersionRevision_deprecated, 0) + + for _, ent := range versionRevisionsEnts { + revisions = append(revisions, *entity.MakePackageVersionRevisionView_deprecated(&ent)) + } + return &view.PackageVersionRevisions_deprecated{Revisions: revisions}, nil +} +func (v versionServiceImpl) GetVersionRevisionsList(packageId, versionName string, filterReq view.PagingFilterReq) (*view.PackageVersionRevisions, error) { + ent, err := v.publishedRepo.GetVersion(packageId, versionName) + if err != nil { + return nil, err + } + if ent == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": versionName}, + } + } + searchQueryReq := entity.PackageVersionSearchQueryEntity{ + PackageId: packageId, + Version: ent.Version, + TextFilter: filterReq.TextFilter, + Limit: filterReq.Limit, + Offset: filterReq.Offset, + } + versionRevisionsEnts, err := v.publishedRepo.GetVersionRevisionsList(searchQueryReq) + if err != nil { + return nil, err + } + revisions := make([]view.PackageVersionRevision, 0) + + for _, ent := range versionRevisionsEnts { + revisions = append(revisions, *entity.MakePackageVersionRevisionView(&ent)) + } + return &view.PackageVersionRevisions{Revisions: revisions}, nil +} + +func (v versionServiceImpl) GetTransformedDocuments_deprecated(packageId, version, apiType, groupName, format string) ([]byte, error) { + if format == "" { + format = string(view.HtmlDocumentFormat) + } + if !view.ValidTransformedDocumentsFormat_deprecated(format) { + return nil, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnknownResponseFormat, + Message: exception.UnknownResponseFormatMsg, + Params: map[string]interface{}{"format": format}, + } + } + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + 
return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": version}, + } + } + groupId := view.MakeOperationGroupId(packageId, versionEnt.Version, versionEnt.Revision, apiType, groupName) + ent, err := v.publishedRepo.GetTransformedDocuments(packageId, version, apiType, groupId, view.DocumentGroupType_deprecated, string(view.JsonDocumentFormat)) + if err != nil { + return nil, err + } + if ent == nil { + return nil, nil + } + if format == string(view.JsonDocumentFormat) { + return ent.Data, nil + } else { + return v.portalService.GenerateInteractivePageForTransformedDocuments(packageId, versionEnt.Version, *ent) + } +} + +func (v versionServiceImpl) GetTransformedDocuments(packageId string, version string, apiType string, groupName string, buildType string, format string) ([]byte, error) { + err := view.ValidateFormatForBuildType(buildType, format) + if err != nil { + return nil, err + } + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return nil, err + } + if versionEnt == nil { + return nil, &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": version}, + } + } + groupId := view.MakeOperationGroupId(packageId, versionEnt.Version, versionEnt.Revision, apiType, groupName) + ent, err := v.publishedRepo.GetTransformedDocuments(packageId, version, apiType, groupId, buildType, format) + if err != nil { + return nil, err + } + if ent == nil { + return nil, nil + } + if format == string(view.HtmlDocumentFormat) { + return v.portalService.GenerateInteractivePageForTransformedDocuments(packageId, versionEnt.Version, *ent) + } + return ent.Data, nil +} + +func (v versionServiceImpl) DeleteVersionsRecursively(ctx context.SecurityContext, packageId string, 
deleteBefore time.Time) (string, error) { + rootPackage, err := v.publishedRepo.GetPackage(packageId) + if err != nil { + return "", err + } + if rootPackage == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + + jobId := uuid.New().String() + ent := entity.VersionCleanupEntity{ + RunId: jobId, + PackageId: packageId, + DeleteBefore: deleteBefore, + Status: string(view.StatusRunning), + } + err = v.versionCleanupRepository.StoreVersionCleanupRun(ent) + if err != nil { + return jobId, err + } + + utils.SafeAsync(func() { + log.Infof("Starting old draft versions cleanup process %s for package %s", jobId, packageId) + page, limit, deletedItems := 0, 100, 0 + for { + getPackageListReq := view.PackageListReq{ + Kind: []string{entity.KIND_PACKAGE, entity.KIND_DASHBOARD}, + Limit: limit, + OnlyFavorite: false, + OnlyShared: false, + Offset: page * limit, + ParentId: packageId, + ShowAllDescendants: true, + } + packages, err := v.publishedRepo.GetFilteredPackagesWithOffset(getPackageListReq, ctx.GetUserId()) + if err != nil { + log.Errorf("failed to get child packages for versions cleanup %s: %s", jobId, err.Error()) + err = v.versionCleanupRepository.UpdateVersionCleanupRun(jobId, string(view.StatusError), err.Error(), deletedItems) + if err != nil { + log.Errorf("failed to set '%s' status for cleanup job id %s: %s", "error", jobId, err.Error()) + return + } + return + } + if len(packages) == 0 { + if rootPackage.Kind == entity.KIND_PACKAGE || rootPackage.Kind == entity.KIND_DASHBOARD { + deleted, err := v.publishedRepo.DeleteDraftVersionsBeforeDate(rootPackage.Id, deleteBefore, "cleanup_job_"+jobId) + if err != nil { + log.Errorf("failed to delete versions of package %s during versions cleanup %s: %s", rootPackage.Id, jobId, err.Error()) + err = v.versionCleanupRepository.UpdateVersionCleanupRun(jobId, 
string(view.StatusError), err.Error(), deletedItems) + if err != nil { + log.Errorf("failed to set '%s' status for cleanup job id %s: %s", "error", jobId, err.Error()) + return + } + return + } + deletedItems += deleted + } + err = v.versionCleanupRepository.UpdateVersionCleanupRun(jobId, string(view.StatusComplete), "", deletedItems) + if err != nil { + log.Errorf("failed to set '%s' status for cleanup job id %s: %s", "complete", jobId, err.Error()) + return + } + log.Infof("version cleanup job %s has deleted %d versions", jobId, deletedItems) + return + } + for _, pkg := range packages { + deleted, err := v.publishedRepo.DeleteDraftVersionsBeforeDate(pkg.Id, deleteBefore, "cleanup_job_"+jobId) + if err != nil { + log.Errorf("failed to delete versions of package %s during versions cleanup %s: %s", pkg.Id, jobId, err.Error()) + err = v.versionCleanupRepository.UpdateVersionCleanupRun(jobId, string(view.StatusError), err.Error(), deletedItems) + if err != nil { + log.Errorf("failed to set '%s' status for cleanup job id %s: %s", "error", jobId, err.Error()) + return + } + return + } + deletedItems += deleted + } + page++ + } + }) + return jobId, nil +} + +func (v versionServiceImpl) CopyVersion(ctx context.SecurityContext, packageId string, version string, req view.CopyVersionReq) (string, error) { + versionEnt, err := v.publishedRepo.GetVersion(packageId, version) + if err != nil { + return "", err + } + if versionEnt == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedVersionNotFound, + Message: exception.PublishedVersionNotFoundMsg, + Params: map[string]interface{}{"version": version}, + } + } + targetPackage, err := v.publishedRepo.GetPackage(req.TargetPackageId) + if err != nil { + return "", err + } + if targetPackage == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: 
map[string]interface{}{"packageId": req.TargetPackageId}, + } + } + currentPackage, err := v.publishedRepo.GetPackage(packageId) + if err != nil { + return "", err + } + if currentPackage == nil { + return "", &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": packageId}, + } + } + if targetPackage.Kind != currentPackage.Kind { + return "", &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageVersionCannotBeCopied, + Message: exception.PackageVersionCannotBeCopiedMsg, + Params: map[string]interface{}{ + "packageId": packageId, + "targetPackageId": req.TargetPackageId, + "version": version, + "error": fmt.Sprintf("target package kind doesn't match current package kind (target='%v', current='%v')", targetPackage.Kind, currentPackage.Kind), + }, + } + } + buildConfig, err := v.publishedService.GetPublishedVersionBuildConfig(packageId, version) + if err != nil { + return "", err + } + var versionSources []byte + if currentPackage.Kind == entity.KIND_PACKAGE { + versionSources, err = v.publishedService.GetVersionSources(packageId, version) + if err != nil { + return "", err + } + } + targetBuildConfig := view.BuildConfig{ + PackageId: req.TargetPackageId, + Version: req.TargetVersion, + PreviousVersion: req.TargetPreviousVersion, + PreviousVersionPackageId: req.TargetPreviousVersionPackageId, + Status: req.TargetStatus, + Refs: buildConfig.Refs, + Files: buildConfig.Files, + Metadata: buildConfig.Metadata, + BuildType: view.BuildType, + CreatedBy: ctx.GetUserId(), + ComparisonRevision: buildConfig.ComparisonRevision, + ComparisonPrevRevision: buildConfig.ComparisonPrevRevision, + UnresolvedRefs: buildConfig.UnresolvedRefs, + ResolveRefs: buildConfig.ResolveRefs, + ResolveConflicts: buildConfig.ResolveConflicts, + ServiceName: buildConfig.ServiceName, + ApiType: buildConfig.ApiType, + GroupName: 
buildConfig.GroupName, + } + if targetBuildConfig.PreviousVersionPackageId == targetBuildConfig.PackageId { + targetBuildConfig.PreviousVersionPackageId = "" + } + targetBuildConfig.Metadata.VersionLabels = req.TargetVersionLabels + + buildTask, err := v.buildService.PublishVersion(ctx, targetBuildConfig, versionSources, false, "", nil, false, false) + if err != nil { + return "", err + } + return buildTask.PublishId, nil +} + +func (v versionServiceImpl) GetPublishedVersionsHistory(filter view.PublishedVersionHistoryFilter) ([]view.PublishedVersionHistoryView, error) { + result := make([]view.PublishedVersionHistoryView, 0) + historyEnts, err := v.publishedRepo.GetPublishedVersionsHistory(filter) + if err != nil { + return nil, err + } + for _, ent := range historyEnts { + result = append(result, entity.MakePublishedVersionHistoryView(ent)) + } + + return result, nil +} + +func (v versionServiceImpl) StartPublishFromCSV(ctx context.SecurityContext, req view.PublishFromCSVReq) (string, error) { + if len(req.CSVData) == 0 { + return "", &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.EmptyCSVFile, + Message: exception.EmptyCSVFileMsg, + } + } + csvOriginal, err := parseCSV(req.CSVData) + if err != nil { + return "", &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.InvalidCSVFile, + Message: exception.InvalidCSVFileMsg, + Params: map[string]interface{}{"error": err.Error()}, + } + } + if len(csvOriginal) == 0 || len(csvOriginal[0]) == 0 { + return "", &exception.CustomError{ + Status: http.StatusInternalServerError, + Code: exception.EmptyCSVFile, + Message: exception.EmptyCSVFileMsg, + } + } + pkg, err := v.publishedRepo.GetPackage(req.PackageId) + if err != nil { + return "", err + } + if pkg == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": 
req.PackageId}, + } + } + if pkg.Kind != entity.KIND_DASHBOARD { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.InvalidPackageKind, + Message: exception.InvalidPackageKindMsg, + Params: map[string]interface{}{"kind": pkg.Kind, "allowedKind": entity.KIND_DASHBOARD}, + } + } + workspace, err := v.publishedRepo.GetPackage(req.ServicesWorkspaceId) + if err != nil { + return "", err + } + if workspace == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PackageNotFound, + Message: exception.PackageNotFoundMsg, + Params: map[string]interface{}{"packageId": req.ServicesWorkspaceId}, + } + } + if workspace.Kind != entity.KIND_WORKSPACE { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.InvalidPackageKind, + Message: exception.InvalidPackageKindMsg, + Params: map[string]interface{}{"kind": workspace.Kind, "allowedKind": entity.KIND_WORKSPACE}, + } + } + if req.PreviousVersion != "" { + previousVersionPackageId := req.PreviousVersionPackageId + if req.PreviousVersionPackageId == "" { + previousVersionPackageId = req.PackageId + } + prevVersion, err := v.publishedRepo.GetVersion(previousVersionPackageId, req.PreviousVersion) + if err != nil { + return "", err + } + if prevVersion == nil { + return "", &exception.CustomError{ + Status: http.StatusNotFound, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"packageId": previousVersionPackageId, "version": req.PreviousVersion}, + } + } + } + + publishEntity := &entity.CSVDashboardPublishEntity{ + PublishId: uuid.NewString(), + Status: string(view.StatusRunning), + Message: "", + Report: []byte{}, + } + + err = v.publishedRepo.StoreCSVDashboardPublishProcess(publishEntity) + if err != nil { + return "", err + } + + utils.SafeAsync(func() { + v.publishFromCSV(ctx, pkg.Name, req, csvOriginal, publishEntity) + }) + return 
publishEntity.PublishId, nil
}

// publishFromCSV asynchronously assembles and publishes a dashboard version from a CSV file.
// Each data row is expected to carry 4 columns: service name, service version, HTTP method
// and path. For every row the service package, its release version and the matching REST
// operation are resolved; resolved versions become refs of the published dashboard and
// resolved operations are collected into an operation group named after the dashboard.
// A per-row status is appended as an extra column and persisted as the publish report.
// All progress/outcome is reported via updateDashboardPublishProcess, so nothing is returned.
func (v versionServiceImpl) publishFromCSV(ctx context.SecurityContext, dashboardName string, req view.PublishFromCSVReq, csvOriginal [][]string, publishEntity *entity.CSVDashboardPublishEntity) {
	type ServiceInfo struct {
		PackageId    string
		Version      string
		Revision     int
		OperationIds []string
	}
	columns := 4
	serviceNameCol := 0
	serviceVersionCol := 1
	methodCol := 2
	pathCol := 3

	// The first cell may carry an Excel-style "sep=<rune>" directive overriding the comma.
	separator := ','
	customSeparator := getCSVSeparator(csvOriginal[0][0])
	if customSeparator != nil {
		separator = *customSeparator
	}

	servicesMap := make(map[string]*ServiceInfo)
	allServices := make(map[string]struct{})
	notIncludedServices := make(map[string]struct{}) // services with no package in the workspace
	notIncludedVersions := make(map[string]struct{}) // "<packageId><sep><version>" keys that failed to resolve
	notIncludedOperationsCount := 0

	// The report is a deep copy of the input with one extra status column appended per row.
	report := make([][]string, len(csvOriginal))
	for i := range csvOriginal {
		report[i] = make([]string, len(csvOriginal[i]))
		copy(report[i], csvOriginal[i])
	}

	firstRow := 0
	//skip first row if its just a separator
	if customSeparator != nil {
		firstRow = 1
	}
	pathParamsRegex := regexp.MustCompile(`\{.+?\}`)
	for i := firstRow; i < len(csvOriginal); i++ {
		row := csvOriginal[i]
		if len(row) != columns {
			report[i] = append(report[i], "incorrect number of columns")
			continue
		}
		serviceName := row[serviceNameCol]
		if serviceName == "" {
			report[i] = append(report[i], "empty service name")
			continue
		}
		allServices[serviceName] = struct{}{}
		serviceVersion := row[serviceVersionCol]
		if serviceVersion == "" {
			report[i] = append(report[i], "empty service version")
			continue
		}
		method := row[methodCol]
		if method == "" {
			report[i] = append(report[i], "empty method")
			continue
		}
		path := row[pathCol]
		if path == "" {
			report[i] = append(report[i], "empty path")
			continue
		}
		path = pathParamsRegex.ReplaceAllString(path, "*") //replace all path parameters with '*'
		serviceInfo := servicesMap[serviceName]
		if serviceInfo == nil {
			if _, exists := notIncludedServices[serviceName]; exists {
				report[i] = append(report[i], "service package doesn't exist")
				continue
			}
			servicePackageId, err := v.publishedRepo.GetServiceOwner(req.ServicesWorkspaceId, serviceName)
			if err != nil {
				report[i] = append(report[i], fmt.Sprintf("failed to look up service package: %v", err.Error()))
				continue
			}
			if servicePackageId == "" {
				report[i] = append(report[i], "service package doesn't exist")
				notIncludedServices[serviceName] = struct{}{}
				continue
			}
			serviceInfo = &ServiceInfo{PackageId: servicePackageId}
			servicesMap[serviceName] = serviceInfo
		}
		if serviceInfo.Version == "" {
			versionKey := fmt.Sprintf("%v%v%v", serviceInfo.PackageId, keySeparator, serviceVersion)
			if _, exists := notIncludedVersions[versionKey]; exists {
				report[i] = append(report[i], "service version doesn't exist")
				continue
			}
			versionEnt, err := v.publishedRepo.GetVersion(serviceInfo.PackageId, serviceVersion)
			if err != nil {
				report[i] = append(report[i], fmt.Sprintf("failed to look up service version: %v", err.Error()))
				continue
			}
			if versionEnt == nil {
				report[i] = append(report[i], "service version doesn't exist")
				// Fix: record the miss so later rows for the same package+version skip
				// the repository lookup. The map was never populated before, making
				// the memoization fast-path above dead code.
				notIncludedVersions[versionKey] = struct{}{}
				continue
			}
			if versionEnt.Status != string(view.Release) {
				report[i] = append(report[i], fmt.Sprintf("service version not in '%v' status", view.Release))
				continue
			}
			serviceInfo.Version = versionEnt.Version
			serviceInfo.Revision = versionEnt.Revision
		} else if serviceInfo.Version != serviceVersion {
			// A service may contribute only one version to the dashboard.
			report[i] = append(report[i], fmt.Sprintf("service already matched with '%v' version", serviceInfo.Version))
			continue
		}
		serviceOperationIds, err := v.operationRepo.GetOperationsByPathAndMethod(serviceInfo.PackageId, serviceInfo.Version, serviceInfo.Revision, string(view.RestApiType), path, method)
		if err != nil {
			report[i] = append(report[i], fmt.Sprintf("failed to look up operation by path and method: %v", err.Error()))
			notIncludedOperationsCount++
			continue
		}
		if len(serviceOperationIds) == 0 {
			report[i] = append(report[i], "endpoint not found")
			notIncludedOperationsCount++
			continue
		}
		if len(serviceOperationIds) > 1 {
			report[i] = append(report[i], "more than 1 endpoint matched")
			notIncludedOperationsCount++
			continue
		}
		serviceInfo.OperationIds = append(serviceInfo.OperationIds, serviceOperationIds[0])
		report[i] = append(report[i], "ok")
	}

	// Every service that resolved to a release version becomes a dashboard ref.
	dashboardRefs := make([]view.BCRef, 0)
	for _, info := range servicesMap {
		if info.Version != "" {
			dashboardRefs = append(dashboardRefs, view.BCRef{
				RefId:   info.PackageId,
				Version: view.MakeVersionRefKey(info.Version, info.Revision),
			})
		}
	}

	var err error
	publishEntity.Report, err = csvToBytes(report, separator)
	if err != nil {
		v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), fmt.Sprintf("internal server error: failed to generate csv report: %v", err.Error()))
		return
	}
	if len(dashboardRefs) == 0 {
		v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), "no versions matched")
		return
	}

	dashboardPublishBuildConfig := view.BuildConfig{
		PackageId:                req.PackageId,
		Version:                  req.Version,
		BuildType:                view.BuildType,
		PreviousVersion:          req.PreviousVersion,
		PreviousVersionPackageId: req.PreviousVersionPackageId,
		Status:                   req.Status,
		Refs:                     dashboardRefs,
		CreatedBy:                ctx.GetUserId(),
		Metadata: view.BuildConfigMetadata{
			VersionLabels: req.VersionLabels,
		},
	}
	build, err := v.buildService.PublishVersion(ctx, dashboardPublishBuildConfig, nil, false, "", nil, false, false)
	if err != nil {
		v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), fmt.Sprintf("failed to start csv dashboard publish: %v", err.Error()))
		return
	}
	if err = v.buildService.AwaitBuildCompletion(build.PublishId); err != nil {
		v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), fmt.Sprintf("failed to publish dashboard from csv: %v", err.Error()))
		return
	}
	// Create the operation group; a pre-existing group with the same name is fine.
	err = v.operationGroupService.CreateOperationGroup(ctx, req.PackageId, req.Version, string(view.RestApiType), view.CreateOperationGroupReq{
		GroupName: dashboardName,
	})
	if err != nil {
		customError, ok := err.(*exception.CustomError)
		if !ok || customError.Code != exception.OperationGroupAlreadyExists {
			v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), fmt.Sprintf("failed to create operation group: %v", err.Error()))
			return
		}
	}
	// Collect matched operations, deduplicated across rows.
	groupOperations := make([]view.GroupOperations, 0)
	uniqueOperations := make(map[view.GroupOperations]struct{})
	for _, info := range servicesMap {
		if info.Version == "" || len(info.OperationIds) == 0 {
			continue
		}
		for _, operationId := range info.OperationIds {
			op := view.GroupOperations{
				PackageId:   info.PackageId,
				Version:     view.MakeVersionRefKey(info.Version, info.Revision),
				OperationId: operationId,
			}
			if _, exists := uniqueOperations[op]; exists {
				continue
			}
			groupOperations = append(groupOperations, op)
			uniqueOperations[op] = struct{}{}
		}
	}
	err = v.operationGroupService.UpdateOperationGroup(ctx, req.PackageId, req.Version, string(view.RestApiType), dashboardName, view.UpdateOperationGroupReq{
		Operations: &groupOperations,
	})
	if err != nil {
		v.updateDashboardPublishProcess(publishEntity, string(view.StatusError), fmt.Sprintf("failed to add operations to operation group: %v", err.Error()))
		return
	}

	// A service is "not included" when it never resolved to a release version.
	notIncludedServicesCount := 0
	for service := range allServices {
		if svc, exists := servicesMap[service]; !exists || svc.Version == "" {
			notIncludedServicesCount++
		}
	}
	summary := ""
	if notIncludedServicesCount > 0 {
		summary = fmt.Sprintf(`%v services were not included into dashboard version`, notIncludedServicesCount)
	}
	if notIncludedOperationsCount > 0 {
		if summary != "" {
			summary = fmt.Sprintf(`%v; %v operations were not included into %v operation group`, summary, notIncludedOperationsCount, dashboardName)
		} else {
			summary = fmt.Sprintf(`%v operations were not included into %v operation group`, notIncludedOperationsCount, dashboardName)
		}
	}

	v.updateDashboardPublishProcess(publishEntity, string(view.StatusComplete), summary)
}

// updateDashboardPublishProcess persists the publish status/message on the entity.
// Persistence failures are only logged: the publish itself already finished.
func (v versionServiceImpl) updateDashboardPublishProcess(publishEntity *entity.CSVDashboardPublishEntity, status string, message string) {
	publishEntity.Status = status
	publishEntity.Message = message
	err := v.publishedRepo.UpdateCSVDashboardPublishProcess(publishEntity)
	if err != nil {
		log.Errorf("failed to update dashboard publish process: %v", err.Error())
	}
}

// GetCSVDashboardPublishStatus returns the status/message of a CSV dashboard
// publish process, or a 404 CustomError when the process is unknown.
func (v versionServiceImpl) GetCSVDashboardPublishStatus(publishId string) (*view.CSVDashboardPublishStatusResponse, error) {
	publishEnt, err := v.publishedRepo.GetCSVDashboardPublishProcess(publishId)
	if err != nil {
		return nil, err
	}
	if publishEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishProcessNotFound,
			Message: exception.PublishProcessNotFoundMsg,
			Params:  map[string]interface{}{"publishId": publishId},
		}
	}
	return &view.CSVDashboardPublishStatusResponse{
		Status:  publishEnt.Status,
		Message: publishEnt.Message,
	}, nil
}

// GetCSVDashboardPublishReport returns the per-row CSV report produced by a
// publish process, or a 404 CustomError when the process is unknown.
func (v versionServiceImpl) GetCSVDashboardPublishReport(publishId string) ([]byte, error) {
	publishEnt, err := v.publishedRepo.GetCSVDashboardPublishReport(publishId)
	if err != nil {
		return nil, err
	}
	if publishEnt == nil {
		return nil, &exception.CustomError{
			Status:  http.StatusNotFound,
			Code:    exception.PublishProcessNotFound,
			Message: exception.PublishProcessNotFoundMsg,
			Params:  map[string]interface{}{"publishId": publishId},
		}
	}
	return publishEnt.Report, nil
}

// parseCSV decodes raw CSV bytes into rows, honouring an optional leading
// Excel-style "sep=<rune>" directive. The directive row (when present) is kept
// as the first returned row so callers can mirror it into their reports.
func parseCSV(csvData []byte) ([][]string, error) {
	csvReader := csv.NewReader(bytes.NewReader(csvData))
	csvReader.FieldsPerRecord = -1 // rows may have varying column counts; validated later
	firstRow, err := csvReader.Read()
	if err != nil {
		return nil, fmt.Errorf("failed to read first csv record: %w", err)
	}
	//check first row for custom separator
	if len(firstRow) == 1 {
		sep := getCSVSeparator(firstRow[0])
		if sep != nil {
			csvReader.Comma = *sep
		}
	}
	records, err := csvReader.ReadAll()
	if err != nil {
		return nil, fmt.Errorf("failed to parse csv records: %w", err)
	}

	return append([][]string{firstRow}, records...), nil
}

// csvToBytes serializes rows back to CSV bytes using the given separator.
func csvToBytes(csvReport [][]string, separator rune) ([]byte, error) {
	var b bytes.Buffer
	writer := bufio.NewWriter(&b)
	csvWriter := csv.NewWriter(writer)
	csvWriter.Comma = separator
	// WriteAll flushes the csv.Writer itself, so no separate csvWriter.Flush is needed.
	if err := csvWriter.WriteAll(csvReport); err != nil {
		return nil, err
	}
	// Fix: the intermediate bufio.Writer was never flushed, so any report smaller
	// than the bufio buffer (~4 KiB) came back empty/truncated from b.Bytes().
	if err := writer.Flush(); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// getCSVSeparator extracts the rune from an Excel-style "sep=<rune>" directive.
// Returns nil when the record is not such a directive.
func getCSVSeparator(record string) *rune {
	lowered := strings.ToLower(record)
	// Fix: the directive must be a prefix; the previous Split-based check also
	// accepted records merely *containing* "sep=" (e.g. "xsep=;").
	if !strings.HasPrefix(lowered, "sep=") {
		return nil
	}
	sep := []rune(strings.TrimPrefix(lowered, "sep="))
	if len(sep) != 1 {
		return nil
	}
	return &sep[0]
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package service + +import ( + "fmt" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/buraksezer/olric" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket" + log "github.com/sirupsen/logrus" + + ws "github.com/gorilla/websocket" +) + +type WsBranchService interface { + ConnectToProjectBranch(ctx context.SecurityContext, projectId string, branchName string, wsId string, connection *ws.Conn) error + HasActiveEditSession(projectId string, branchName string) bool + NotifyProjectBranchUsers(projectId string, branchName string, action interface{}) + NotifyProjectBranchUser(projectId string, branchName string, wsId string, action interface{}) + DisconnectClient(projectId string, branchName string, wsId string) + DisconnectClients(projectId string, branchName string) +} + +func NewWsBranchService(userService UserService, wsLoadBalancer WsLoadBalancer) WsBranchService { + service := &wsBranchServiceImpl{ + branchEditSessions: make(map[string]*websocket.WsEditSession), + userService: userService, + wsLoadBalancer: wsLoadBalancer, + mutex: sync.RWMutex{}, + } + utils.SafeAsync(func() { + service.runAsyncBranchKeepaliveJob() + }) + utils.SafeAsync(func() { + _, err := 
wsLoadBalancer.GetBranchEventTopic().AddListener(service.handleRemoteBranchEvent) + if err != nil { + log.Errorf("Failed to subscribe to branch remote events: %s", err.Error()) + } + }) + return service +} + +type wsBranchServiceImpl struct { + branchEditSessions map[string]*websocket.WsEditSession + userService UserService + wsLoadBalancer WsLoadBalancer + mutex sync.RWMutex +} + +func (w *wsBranchServiceImpl) HasActiveEditSession(projectId string, branchName string) bool { + editSessionId := makeBranchEditSessionId(projectId, branchName) + if editSessionId == "" { + log.Errorf("unable to make session id from %s %s", projectId, branchName) + return false + } + + _, exists := w.branchEditSessions[editSessionId] + if !exists { + hasSession, err := w.wsLoadBalancer.HasBranchEditSession(editSessionId) + if err != nil { + log.Errorf("unable to check if branch edit session exists: %s", err.Error()) + return false + } + return hasSession + } + return exists +} + +func (w *wsBranchServiceImpl) ConnectToProjectBranch(ctx context.SecurityContext, projectId string, branchName string, wsId string, connection *ws.Conn) error { + user, err := w.userService.GetUserFromDB(ctx.GetUserId()) // TODO: maybe store user object in context? 
+ if err != nil { + return err + } + if user == nil { + userId := ctx.GetUserId() + user = &view.User{Id: userId, Name: userId} + } + + editSessionId := makeBranchEditSessionId(projectId, branchName) + if editSessionId == "" { + return fmt.Errorf("unable to make session id from %s %s", projectId, branchName) + } + + w.mutex.Lock() + defer w.mutex.Unlock() + + editSession, exists := w.branchEditSessions[editSessionId] + if !exists { + editSession = websocket.NewWsEditSession(editSessionId, nil, w, user.Id) + w.branchEditSessions[editSessionId] = editSession + metrics.WSBranchEditSessionCount.WithLabelValues().Set(float64(len(w.branchEditSessions))) + } + + editSession.ConnectClient(wsId, connection, *user, nil) + + return nil +} + +func (w *wsBranchServiceImpl) NotifyProjectBranchUsers(projectId string, branchName string, action interface{}) { + editSessionId := makeBranchEditSessionId(projectId, branchName) + if editSessionId == "" { + log.Errorf("unable to make session id from %s %s", projectId, branchName) + return + } + + editSession, exists := w.branchEditSessions[editSessionId] + if !exists { + err := w.wsLoadBalancer.GetBranchEventTopic().Publish(websocket.BranchEventToMap(websocket.BranchEvent{ProjectId: projectId, BranchName: branchName, Action: action})) + if err != nil { + log.Errorf("unable to publish ws branch event: %s", err.Error()) + } + return + } + editSession.NotifyAll(action) +} + +func (w *wsBranchServiceImpl) NotifyProjectBranchUser(projectId string, branchName string, wsId string, action interface{}) { + editSessionId := makeBranchEditSessionId(projectId, branchName) + if editSessionId == "" { + log.Errorf("unable to make session id from %s %s", projectId, branchName) + return + } + + editSession, exists := w.branchEditSessions[editSessionId] + if !exists { + err := w.wsLoadBalancer.GetBranchEventTopic().Publish(websocket.BranchEventToMap(websocket.BranchEvent{ProjectId: projectId, BranchName: branchName, WsId: wsId, Action: action})) + if err 
!= nil { + log.Errorf("unable to publish ws branch event for user: %s", err.Error()) + } + return + } + editSession.NotifyClient(wsId, action) +} + +func (w *wsBranchServiceImpl) HandleSessionClosed(editSessionId string) { + w.mutex.Lock() + defer w.mutex.Unlock() + + delete(w.branchEditSessions, editSessionId) + metrics.WSBranchEditSessionCount.WithLabelValues().Set(float64(len(w.branchEditSessions))) +} + +func (w *wsBranchServiceImpl) HandleUserDisconnected(editSessionId string, wsId string) { +} + +func makeBranchEditSessionId(projectId string, branchName string) string { + id := projectId + stringSeparator + branchName + if strings.Count(id, stringSeparator) > 1 { + log.Errorf("Unable to compose correct ws edit session id since names contain string separator") + return "" + } + return id +} + +func (w *wsBranchServiceImpl) DisconnectClient(projectId string, branchName string, wsId string) { + sessionId := makeBranchEditSessionId(projectId, branchName) + session, exists := w.branchEditSessions[sessionId] + if !exists { + return + } + session.ForceDisconnect(wsId) +} + +func (w *wsBranchServiceImpl) DisconnectClients(projectId string, branchName string) { + sessionId := makeBranchEditSessionId(projectId, branchName) + session, exists := w.branchEditSessions[sessionId] + if !exists { + return + } + session.ForceDisconnectAll() +} + +func (w *wsBranchServiceImpl) runAsyncBranchKeepaliveJob() { + for range time.Tick(websocket.PingTime) { + for sessId, session := range w.branchEditSessions { + sessIdTmp := sessId + sessionTmp := session + utils.SafeAsync(func() { + err := w.wsLoadBalancer.TrackSession(sessIdTmp) + if err != nil { + log.Errorf("Unable to make keepalive for branch edit session with id = %s: %s", sessIdTmp, err.Error()) + } + }) + utils.SafeAsync(func() { + sessionTmp.SendPingToAllClients() + }) + } + } +} + +func (w *wsBranchServiceImpl) handleRemoteBranchEvent(msg olric.DTopicMessage) { + eventMap := msg.Message.(map[string]interface{}) + event := 
websocket.BranchEventFromMap(eventMap) + + editSessionId := makeBranchEditSessionId(event.ProjectId, event.BranchName) + + editSession, exists := w.branchEditSessions[editSessionId] + if !exists { + return + } + log.Debugf("Got remote branch event: %+v, sessId: %s", event, editSessionId) + if event.WsId != "" { + editSession.NotifyClient(event.WsId, event.Action) + } else { + editSession.NotifyAll(event.Action) + } +} diff --git a/qubership-apihub-service/service/WsFileEditService.go b/qubership-apihub-service/service/WsFileEditService.go new file mode 100644 index 0000000..395e1fa --- /dev/null +++ b/qubership-apihub-service/service/WsFileEditService.go @@ -0,0 +1,605 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "encoding/json" + "fmt" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/metrics" + "github.com/buraksezer/olric" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/context" + ot "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/ot" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/websocket" + ws "github.com/gorilla/websocket" + log "github.com/sirupsen/logrus" +) + +type WsFileEditService interface { + ConnectToFileEditSession(ctx context.SecurityContext, projectId string, branchName string, fileId string, wsId string, connection *ws.Conn) error + SetFileContent(projectId string, branchName string, fileId string, content []byte) + HandleCommitAction(projectId string, branchName string, fileId string) error +} + +func NewWsFileEditService(userService UserService, draftContentService DraftContentService, branchEditorsService BranchEditorsService, wsLoadBalancer WsLoadBalancer) WsFileEditService { + service := &WsFileEditServiceImpl{ + fileEditSessions: make(map[string]*websocket.WsEditSession), + files: map[string]*ot.ServerDoc{}, + fileMutex: map[string]*sync.RWMutex{}, // TODO: replace with channel + cursors: map[string]map[string]websocket.CursorValue{}, + wsEventsCh: map[string]chan websocket.WsEvent{}, + opsStatData: map[string]websocket.OpsStatData{}, + userService: userService, + draftContentService: draftContentService, + branchEditorsService: branchEditorsService, + wsLoadBalancer: wsLoadBalancer, + sessionMutex: sync.RWMutex{}, // TODO: replace with channel + } + + utils.SafeAsync(func() { + service.runAsyncSaveJob() + }) + utils.SafeAsync(func() { + service.runAsyncFileKeepaliveJob() + }) + utils.SafeAsync(func() { + _, err := 
wsLoadBalancer.GetFileEventTopic().AddListener(service.handleRemoteFileEvent) + if err != nil { + log.Errorf("Failed to subscribe to file remote events: %s", err.Error()) + } + }) + return service +} + +type WsFileEditServiceImpl struct { + fileEditSessions map[string]*websocket.WsEditSession + files map[string]*ot.ServerDoc + fileMutex map[string]*sync.RWMutex + cursors map[string]map[string]websocket.CursorValue + wsEventsCh map[string]chan websocket.WsEvent + opsStatData map[string]websocket.OpsStatData + + userService UserService + draftContentService DraftContentService + branchEditorsService BranchEditorsService + wsLoadBalancer WsLoadBalancer + sessionMutex sync.RWMutex +} + +const stringSeparator = "|@@|" + +func (w *WsFileEditServiceImpl) HandleCommitAction(projectId string, branchName string, fileId string) error { + editSessionId := makeFileEditSessionId(projectId, branchName, fileId) + if editSessionId == "" { + return fmt.Errorf("unable to make session id from %s %s %s", projectId, branchName, fileId) + } + + w.sessionMutex.Lock() + defer w.sessionMutex.Unlock() + + file, exists := w.files[editSessionId] + if exists { + log.Infof("Detected non-saved content before commit for %s, going to save data", editSessionId) + opsStatData := w.opsStatData[editSessionId] + if opsStatData.LastSavedRev < file.Rev() { + w.SaveFileContent(editSessionId) + } + } else { + err := w.wsLoadBalancer.GetFileEventTopic().Publish(websocket.FileEventToMap(websocket.FileEvent{ProjectId: projectId, BranchName: branchName, FileId: fileId, Action: "commit"})) + if err != nil { + log.Errorf("unable to publish ws file event: %s", err.Error()) + } + return nil + } + + return nil +} + +func (w *WsFileEditServiceImpl) ConnectToFileEditSession(ctx context.SecurityContext, projectId string, branchName string, fileId string, wsId string, connection *ws.Conn) error { + user, err := w.userService.GetUserFromDB(ctx.GetUserId()) // TODO: maybe store user object in context? 
+ if err != nil { + return err + } + if user == nil { + userId := ctx.GetUserId() + user = &view.User{Id: userId, Name: userId} + } + + w.sessionMutex.Lock() + defer w.sessionMutex.Unlock() + + editSessionId := makeFileEditSessionId(projectId, branchName, fileId) + if editSessionId == "" { + return fmt.Errorf("unable to make session id from %s %s %s", projectId, branchName, fileId) + } + + editSession, exists := w.fileEditSessions[editSessionId] + if !exists { + cd, err := w.draftContentService.GetContentFromDraftOrGit(ctx, projectId, branchName, fileId) + if err != nil { + return err + } + + editSession = websocket.NewWsEditSession(editSessionId, w, w, user.Id) + w.fileEditSessions[editSessionId] = editSession + metrics.WSFileEditSessionCount.WithLabelValues().Set(float64(len(w.fileEditSessions))) + w.files[editSessionId] = &ot.ServerDoc{Doc: ot.NewDocFromStr(string(cd.Data)), History: []ot.Ops{}} + w.fileMutex[editSessionId] = &sync.RWMutex{} + _, mapExists := w.cursors[editSessionId] + if !mapExists { + w.cursors[editSessionId] = make(map[string]websocket.CursorValue) + } + _, chExists := w.wsEventsCh[editSessionId] + if !chExists { + ch := make(chan websocket.WsEvent) + w.wsEventsCh[editSessionId] = ch + utils.SafeAsync(func() { + w.handleWsEvents(ch) + }) + } + w.opsStatData[editSessionId] = websocket.OpsStatData{LastSavedRev: 0, SaveTimestamp: time.Time{}} + } + + wg := sync.WaitGroup{} + wg.Add(1) + + editSession.ConnectClient(wsId, connection, *user, &wg) + + wg.Wait() // wait for user:connected messages should be sent first + + // send doc snapshot on connect + file := w.files[editSessionId] + snapshotMessage := websocket.DocSnapshotOutputMessage{ + Type: websocket.DocSnapshotOutputType, + Revision: file.Rev(), + Document: []string{file.Doc.String()}, + } + + editSession.NotifyClientSync(wsId, snapshotMessage) + + // send other user cursors on connect + cursors := w.cursors[editSessionId] + for cWsId, cursor := range cursors { + if cWsId == wsId { + 
continue + } + editSession.NotifyClient(wsId, websocket.UserCursorOutput{ + Type: websocket.UserCursorOutputType, + SessionId: cWsId, + Cursor: websocket.CursorValue{ + Position: cursor.Position, + SelectionEnd: cursor.SelectionEnd, + }, + }) + } + + return nil +} + +func (w *WsFileEditServiceImpl) SetFileContent(projectId string, branchName string, fileId string, content []byte) { + editSessionId := makeFileEditSessionId(projectId, branchName, fileId) + if editSessionId == "" { + return + } + sess, exists := w.fileEditSessions[editSessionId] + if !exists { + err := w.wsLoadBalancer.GetFileEventTopic().Publish(websocket.FileEventToMap(websocket.FileEvent{ProjectId: projectId, BranchName: branchName, FileId: fileId, Action: "set_content", Content: string(content)})) + if err != nil { + log.Errorf("unable to publish ws file event: %s", err.Error()) + } + return + } + + fMutex, mExists := w.fileMutex[sess.EditSessionId] + if !mExists { + log.Errorf("Unable to SetFileContent: file mutex not found for edit session id = %s", sess.EditSessionId) + return + } + + fMutex.Lock() + defer fMutex.Unlock() + + file, fExists := w.files[sess.EditSessionId] + if !fExists { + log.Errorf("Unable to SetFileContent: file not found for edit session id = %s", sess.EditSessionId) + return + } + + wsId := "system" // TODO: this user doesnt really exists. Probably should be handled separately on frontend + + message := websocket.OperationInputMessage{ + Type: websocket.OperationOutputType, + Revision: file.Rev(), + Operation: []interface{}{ + -file.Doc.Size, + string(content)}, + } + + utils.SafeAsync(func() { + w.HandleOperationWsMessage(sess, message, wsId) + }) +} + +func (w *WsFileEditServiceImpl) handleWsEvents(ch chan websocket.WsEvent) { + for { + event, more := <-ch + if event.EditSessionId != "" { + sess, exists := w.fileEditSessions[event.EditSessionId] + if !exists { + continue // TODO: error log? 
+ } + + var typed websocket.TypedWsMessage + err := json.Unmarshal(event.Data, &typed) + if err != nil { + log.Errorf("Unable to unmarshall WS file edit message %s with err %s", string(event.Data), err.Error()) + sess.NotifyClientSync(event.WsId, + websocket.UnexpectedMessage{ + Type: websocket.UnexpectedMessageType, + Message: string(event.Data), + }) + w.disconnectClient(sess.EditSessionId, event.WsId) + continue + } + log.Debugf("Received from id '%s' message '%s'", event.WsId, string(event.Data)) + + switch typed.Type { + case websocket.UserCursorInputType: + var message websocket.UserCursorInput + err := json.Unmarshal(event.Data, &message) + if err != nil { + log.Errorf("Unable to unmarshall WS file edit message %s with err %s", string(event.Data), err.Error()) + sess.NotifyClientSync(event.WsId, + websocket.UnexpectedMessage{ + Type: websocket.UnexpectedMessageType, + Message: string(event.Data), + }) + w.disconnectClient(sess.EditSessionId, event.WsId) + continue + } + cValue := websocket.CursorValue{Position: message.Position, SelectionEnd: message.SelectionEnd} + w.cursors[event.EditSessionId][event.WsId] = cValue + + outMessage := websocket.UserCursorOutput{ + Type: websocket.UserCursorOutputType, + SessionId: event.WsId, + Cursor: cValue, + } + + sess.NotifyOthers(event.WsId, outMessage) + + continue + case websocket.OperationInputMessageType: + var message websocket.OperationInputMessage + err := json.Unmarshal(event.Data, &message) + if err != nil { + log.Errorf("Unable to unmarshall WS file edit message %s with err %s", string(event.Data), err.Error()) + sess.NotifyClientSync(event.WsId, + websocket.UnexpectedMessage{ + Type: websocket.UnexpectedMessageType, + Message: string(event.Data), + }) + w.disconnectClient(sess.EditSessionId, event.WsId) + continue + } + + w.HandleOperationWsMessage(sess, message, event.WsId) + continue + case websocket.DebugStateInputMessageType: + w.HandleDebugStateWsMessage(sess, event.WsId) + continue + default: + 
log.Errorf("unknown message type '%s'", typed.Type) + sess.NotifyClientSync(event.WsId, + websocket.UnexpectedMessage{ + Type: websocket.UnexpectedMessageType, + Message: string(event.Data), + }) + w.disconnectClient(sess.EditSessionId, event.WsId) + } + } + + if !more { + return + } + } +} + +func (w *WsFileEditServiceImpl) HandleOperationWsMessage(sess *websocket.WsEditSession, message websocket.OperationInputMessage, wsId string) { + ops := makeGoOtOps(message.Operation) + + fMutex, mExists := w.fileMutex[sess.EditSessionId] + if !mExists { + log.Errorf("Unable to process operaton: file mutex not found for edit session id = %s", sess.EditSessionId) + w.disconnectClients(sess.EditSessionId) + return + } + fMutex.Lock() + defer fMutex.Unlock() + + file, fExists := w.files[sess.EditSessionId] + if !fExists { + log.Errorf("Unable to process operaton: file not found for edit session id = %s", sess.EditSessionId) + w.disconnectClients(sess.EditSessionId) + return + } + + resultOtOps, err := file.Recv(message.Revision, ops) + if err != nil { + log.Errorf("Unable to process operaton for session %s: ops not applicable: %s", sess.EditSessionId, err.Error()) + sess.NotifyClientSync(wsId, + websocket.UnexpectedMessage{ + Type: websocket.UnexpectedMessageType, + Message: message, + }) + w.disconnectClient(sess.EditSessionId, wsId) + return + } + + opsStatData := w.opsStatData[sess.EditSessionId] + if file.Rev() == 1 || (file.Rev()-opsStatData.LastSavedRev) > 100 { + utils.SafeAsync(func() { + w.SaveFileContent(sess.EditSessionId) + }) + } + client := sess.GetClient(wsId) + if client != nil { + projectId, branchName, _ := splitFileEditSessionId(sess.EditSessionId) + err = w.branchEditorsService.AddBranchEditor(projectId, branchName, client.User.Id) + if err != nil { + log.Errorf("Unable to add editor for session %s: %s", sess.EditSessionId, err.Error()) + // TODO: close session or not? 
+ } + } + + resultJsOps := makeJsOtOps(resultOtOps) + + responseMessage := websocket.OperationOutputMessage{ + Type: websocket.OperationOutputType, + SessionId: wsId, + Revision: message.Revision, + Operation: resultJsOps, + } + + sess.NotifyAll(responseMessage) +} + +func (w *WsFileEditServiceImpl) HandleMessage(messageBytes []byte, wsId string, session *websocket.WsEditSession) { + event := websocket.WsEvent{ + EditSessionId: session.EditSessionId, + WsId: wsId, + Data: messageBytes, + } + + ch, exists := w.wsEventsCh[session.EditSessionId] + if !exists { + log.Errorf("Unable to handle event since session %s channel not found", session.EditSessionId) + } + ch <- event +} + +func (w *WsFileEditServiceImpl) runAsyncSaveJob() { + for range time.Tick(time.Second * 5) { + for sessId, data := range w.opsStatData { + file, fExists := w.files[sessId] + if !fExists { + continue + } + if data.LastSavedRev < file.Rev() { + sessIdTmp := sessId + utils.SafeAsync(func() { + w.SaveFileContent(sessIdTmp) + }) + } + } + } +} + +func (w *WsFileEditServiceImpl) HandleSessionClosed(editSessionId string) { + w.sessionMutex.Lock() + defer w.sessionMutex.Unlock() + + w.SaveFileContent(editSessionId) + + delete(w.fileEditSessions, editSessionId) + metrics.WSFileEditSessionCount.WithLabelValues().Set(float64(len(w.fileEditSessions))) + delete(w.files, editSessionId) + delete(w.fileMutex, editSessionId) + delete(w.cursors, editSessionId) + + channel, sessExists := w.wsEventsCh[editSessionId] + if sessExists { + close(channel) + } + delete(w.wsEventsCh, editSessionId) + delete(w.opsStatData, editSessionId) +} + +func (w *WsFileEditServiceImpl) HandleUserDisconnected(editSessionId string, wsId string) { + w.sessionMutex.Lock() + defer w.sessionMutex.Unlock() + + // delete user's cursor value if any + cursors, sessExists := w.cursors[editSessionId] + if !sessExists { + return + } + + _, userExists := cursors[wsId] + if !userExists { + return + } + delete(cursors, wsId) +} + +func (w 
*WsFileEditServiceImpl) SaveFileContent(editSessionId string) { + sess, exists := w.fileEditSessions[editSessionId] + if !exists { + log.Errorf("session not found for id = %s", editSessionId) + } + + fMutex, mExists := w.fileMutex[sess.EditSessionId] + if !mExists { + log.Errorf("Unable to process operaton: file mutex not found for edit session id = %s", sess.EditSessionId) + } + fMutex.Lock() + defer fMutex.Unlock() + + file, fExists := w.files[editSessionId] + if !fExists { + log.Errorf("file not found for edit session id = %s", editSessionId) + } + + // avoid empty save + statData, sdExists := w.opsStatData[editSessionId] + if sdExists { + if statData.LastSavedRev == file.Rev() { + return + } + } + + projectId, branchName, fileId := splitFileEditSessionId(sess.EditSessionId) + + err := w.draftContentService.UpdateDraftContentData(context.CreateFromId(sess.OriginatorUserId), projectId, branchName, fileId, []byte(file.Doc.String())) + if err != nil { + log.Errorf("failed to save ws file content for session %s: %s", editSessionId, err.Error()) + w.disconnectClients(sess.EditSessionId) + } + + w.opsStatData[editSessionId] = websocket.OpsStatData{SaveTimestamp: time.Now(), LastSavedRev: file.Rev()} +} + +func makeGoOtOps(ops []interface{}) ot.Ops { + var result ot.Ops + for _, op := range ops { + switch op.(type) { + case int: + result = append(result, ot.Op{N: op.(int)}) + case float64: + result = append(result, ot.Op{N: int(op.(float64))}) + case string: + result = append(result, ot.Op{S: op.(string)}) + default: + log.Errorf("unknown op type: '%+v'", op) + continue + } + } + return result +} + +func makeJsOtOps(ops ot.Ops) []interface{} { + var result []interface{} + for _, op := range ops { + if op.S != "" { + result = append(result, op.S) + } else { + result = append(result, op.N) + } + } + return result +} + +func makeFileEditSessionId(projectId string, branchName string, fileId string) string { + id := projectId + stringSeparator + branchName + stringSeparator 
+ fileId + if strings.Count(id, stringSeparator) > 2 { + log.Errorf("Unable to compose correct ws edit session id since names contain string separator") + return "" + } + return id +} + +func splitFileEditSessionId(editSessionId string) (string, string, string) { + parts := strings.Split(editSessionId, stringSeparator) + if len(parts) != 3 { + log.Errorf("Incorrect ws edit session id: %s, unable to split", editSessionId) + return "", "", "" + } + //return projectId, branchName, fileId + return parts[0], parts[1], parts[2] +} + +func (w *WsFileEditServiceImpl) HandleDebugStateWsMessage(sess *websocket.WsEditSession, wsId string) { + message := websocket.DebugSessionStateOutputMessage{ + Session: sess, + File: w.files[sess.EditSessionId], + Cursors: w.cursors[sess.EditSessionId], + OpsStatData: w.opsStatData[sess.EditSessionId], + } + + sess.NotifyClient(wsId, message) +} + +func (w *WsFileEditServiceImpl) disconnectClient(sessionId string, wsId string) { + session, exists := w.fileEditSessions[sessionId] + if !exists { + return + } + session.ForceDisconnect(wsId) +} + +func (w *WsFileEditServiceImpl) disconnectClients(sessionId string) { + session, exists := w.fileEditSessions[sessionId] + if !exists { + return + } + session.ForceDisconnectAll() +} + +func (w *WsFileEditServiceImpl) runAsyncFileKeepaliveJob() { + for range time.Tick(websocket.PingTime) { + for sessId, session := range w.fileEditSessions { + sessIdTmp := sessId + sessionTmp := session + utils.SafeAsync(func() { + err := w.wsLoadBalancer.TrackSession(sessIdTmp) + if err != nil { + log.Errorf("Unable to make keepalive for file edit session with id = %s: %s", sessIdTmp, err.Error()) + } + }) + utils.SafeAsync(func() { + sessionTmp.SendPingToAllClients() + }) + } + } +} + +func (w *WsFileEditServiceImpl) handleRemoteFileEvent(msg olric.DTopicMessage) { + eventMap := msg.Message.(map[string]interface{}) + event := websocket.FileEventFromMap(eventMap) + editSessionId := 
makeBranchEditSessionId(event.ProjectId, event.BranchName) + + _, exists := w.fileEditSessions[editSessionId] + if !exists { + return + } + switch event.Action { + case "set_content": + w.SetFileContent(event.ProjectId, event.BranchName, event.FileId, []byte(event.Content)) + case "commit": + err := w.HandleCommitAction(event.ProjectId, event.BranchName, event.FileId) + if err != nil { + log.Errorf("Got error when handling commit action: %s", err.Error()) + } + default: + log.Errorf("Unknown remote file event action type: %s", event.Action) + } +} diff --git a/qubership-apihub-service/service/WsLoadBalancer.go b/qubership-apihub-service/service/WsLoadBalancer.go new file mode 100644 index 0000000..4137610 --- /dev/null +++ b/qubership-apihub-service/service/WsLoadBalancer.go @@ -0,0 +1,474 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package service + +import ( + "encoding/gob" + "errors" + "fmt" + "net/url" + "strings" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/cache" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/buraksezer/olric" + "github.com/buraksezer/olric/query" + ws "github.com/gorilla/websocket" + log "github.com/sirupsen/logrus" +) + +const LocalServer = "local" + +type WsLoadBalancer interface { + SelectWsServer(projectId string, branchName string, fileId string) (string, error) + TrackSession(sessionId string) error + ListSessions() ([]WSLoadBalancerSession, error) + HasBranchEditSession(sessionId string) (bool, error) + ListNodes() ([]string, error) + ListForwardedSessions() []string + GetBindAddr() string + RedirectWs(addr string, serverConn *ws.Conn, origSecWebSocketKey string) + GetBranchEventTopic() *olric.DTopic + GetFileEventTopic() *olric.DTopic +} + +func NewWsLoadBalancer(op cache.OlricProvider) (WsLoadBalancer, error) { + gob.Register(WSLoadBalancerSession{}) + + lb := &wsLoadBalancerImpl{ + op: op, + isReadyWg: sync.WaitGroup{}, + olricC: nil, + sessMap: nil, + bindAddr: "", + forwardedConnections: sync.Map{}, + } + lb.isReadyWg.Add(1) + + utils.SafeAsync(func() { + lb.initWhenOlricReady() + }) + + return lb, nil +} + +type wsLoadBalancerImpl struct { + op cache.OlricProvider + isReadyWg sync.WaitGroup + olricC *olric.Olric + sessMap *olric.DMap + branchEventTopic *olric.DTopic + fileEventTopic *olric.DTopic + bindAddr string + forwardedConnections sync.Map +} + +type forwardedConnection struct { + serverConn *ws.Conn + clientConn *ws.Conn + started time.Time + origSecWebSocketKey string +} + +func (w *wsLoadBalancerImpl) initWhenOlricReady() { + var err error + hasErrors := false + + w.olricC = w.op.Get() + w.sessMap, err = w.olricC.NewDMap("EditSessions") + if err != nil { + log.Errorf("Failed to creare dmap EditSessions: %s", err.Error()) + hasErrors = true + } + 
w.branchEventTopic, err = w.olricC.NewDTopic("branch_events", 50, olric.UnorderedDelivery) + if err != nil { + log.Errorf("Failed to creare branchEventTopic: %s", err.Error()) + hasErrors = true + } + w.fileEventTopic, err = w.olricC.NewDTopic("file_events", 50, olric.UnorderedDelivery) + if err != nil { + log.Errorf("Failed to creare fileEventTopic: %s", err.Error()) + hasErrors = true + } + w.bindAddr = w.op.GetBindAddr() + + if hasErrors { + log.Infof("Failed to init WsLoadBalancer, going to retry") + time.Sleep(time.Second * 5) + w.initWhenOlricReady() + return + } + + w.isReadyWg.Done() + log.Infof("WsLoadBalancer is ready") + + utils.SafeAsync(func() { + w.runErrorHandlingJob() + }) + + utils.SafeAsync(func() { + w.sendPingPong() + }) +} + +func (w *wsLoadBalancerImpl) GetBindAddr() string { + w.isReadyWg.Wait() + + return w.bindAddr +} + +func (w *wsLoadBalancerImpl) ListForwardedSessions() []string { + w.isReadyWg.Wait() + + var result []string + w.forwardedConnections.Range(func(key, value interface{}) bool { + result = append(result, fmt.Sprintf("Url: %s | started: %s | ws-key: %s", key.(string), value.(forwardedConnection).started.Format(time.RFC3339), value.(forwardedConnection).origSecWebSocketKey)) + return true + }) + + return result +} + +func (w *wsLoadBalancerImpl) ListSessions() ([]WSLoadBalancerSession, error) { + w.isReadyWg.Wait() + + var result []WSLoadBalancerSession + cursor, err := w.sessMap.Query(query.M{"$onKey": query.M{"$regexMatch": ""}}) + if err != nil { + return nil, err + } + err = cursor.Range(func(key string, value interface{}) bool { + result = append(result, value.(WSLoadBalancerSession)) + return true + }) + if err != nil { + return nil, err + } + + return result, nil +} + +func (w *wsLoadBalancerImpl) HasBranchEditSession(sessionId string) (bool, error) { + w.isReadyWg.Wait() + + branchSessionRegex := strings.Replace(sessionId, "|", "\\|", -1) + var branchEditSessions []WSLoadBalancerSession + + cursor, err := 
w.sessMap.Query(query.M{"$onKey": query.M{"$regexMatch": branchSessionRegex}}) + if err != nil { + return false, err + } + err = cursor.Range(func(key string, value interface{}) bool { + branchEditSessions = append(branchEditSessions, value.(WSLoadBalancerSession)) + return true + }) + if len(branchEditSessions) > 0 { + return true, nil + } + return false, nil +} + +func (w *wsLoadBalancerImpl) ListNodes() ([]string, error) { + w.isReadyWg.Wait() + + stats, err := w.olricC.Stats() + if err != nil { + return nil, err + } + + var result []string + + for _, v := range stats.ClusterMembers { + result = append(result, fmt.Sprintf("%+v", v)) + } + return result, err +} + +type WSLoadBalancerSession struct { + SessionId string + NodeAddress string +} + +func (w *wsLoadBalancerImpl) TrackSession(sessionId string) error { + w.isReadyWg.Wait() + + session := WSLoadBalancerSession{ + SessionId: sessionId, + NodeAddress: w.bindAddr, + } + + return w.sessMap.PutEx(sessionId, session, time.Second*30) +} + +func (w *wsLoadBalancerImpl) SelectWsServer(projectId string, branchName string, fileId string) (string, error) { + w.isReadyWg.Wait() + + var sessionId string + if fileId != "" { + sessionId = makeFileEditSessionId(projectId, branchName, fileId) + + branchSessionId := makeBranchEditSessionId(projectId, branchName) + branchObj, err := w.sessMap.Get(branchSessionId) + if err != nil { + if errors.Is(err, olric.ErrKeyNotFound) { + // no branch edit session found, continue + } else { + return "", err + } + } + if branchObj != nil { + branchSession := branchObj.(WSLoadBalancerSession) + if branchSession.NodeAddress == w.bindAddr { + return LocalServer, nil + } else { + return branchSession.NodeAddress, nil + } + } + } else { + sessionId = makeBranchEditSessionId(projectId, branchName) + + fileSessionRegex := makeFileEditSessionId(projectId, branchName, ".*") + fileSessionRegex = strings.Replace(fileSessionRegex, "|", "\\|", -1) + var fileEditSessions []WSLoadBalancerSession + 
// SelectWsServer picks the cluster node that should host a websocket
// session for the given project/branch (and optionally file). Returns
// LocalServer when this node should serve the connection, otherwise the
// address of the owning node. Sessions are co-located: a file edit
// session is routed to the node that already holds the branch edit
// session (and vice versa), so all sessions for one branch live on one
// node.
func (w *wsLoadBalancerImpl) SelectWsServer(projectId string, branchName string, fileId string) (string, error) {
	w.isReadyWg.Wait()

	var sessionId string
	if fileId != "" {
		sessionId = makeFileEditSessionId(projectId, branchName, fileId)

		// If a branch edit session already exists, route to its node.
		branchSessionId := makeBranchEditSessionId(projectId, branchName)
		branchObj, err := w.sessMap.Get(branchSessionId)
		if err != nil {
			if errors.Is(err, olric.ErrKeyNotFound) {
				// no branch edit session found, continue
			} else {
				return "", err
			}
		}
		if branchObj != nil {
			branchSession := branchObj.(WSLoadBalancerSession)
			if branchSession.NodeAddress == w.bindAddr {
				return LocalServer, nil
			} else {
				return branchSession.NodeAddress, nil
			}
		}
	} else {
		sessionId = makeBranchEditSessionId(projectId, branchName)

		// If any file edit session for this branch exists, route to its
		// node. ".*" as the fileId turns the composed id into a regex
		// matching every file of the branch; the separator is escaped so
		// it matches literally.
		fileSessionRegex := makeFileEditSessionId(projectId, branchName, ".*")
		fileSessionRegex = strings.Replace(fileSessionRegex, "|", "\\|", -1)
		var fileEditSessions []WSLoadBalancerSession
		cursor, err := w.sessMap.Query(query.M{"$onKey": query.M{"$regexMatch": fileSessionRegex}})
		if err != nil {
			return "", err
		}
		// NOTE(review): the Range error is assigned but not checked here —
		// a failed iteration falls through as "no sessions"; confirm this
		// best-effort behavior is intended.
		err = cursor.Range(func(key string, value interface{}) bool {
			fileEditSessions = append(fileEditSessions, value.(WSLoadBalancerSession))
			return true
		})
		// all sessions should be bound to one node
		if len(fileEditSessions) > 0 {
			if fileEditSessions[0].NodeAddress == w.bindAddr {
				return LocalServer, nil
			} else {
				return fileEditSessions[0].NodeAddress, nil
			}
		}
	}

	// No related session exists: try to claim this session for the local
	// node with an atomic put-if-absent (30s TTL, refreshed by
	// TrackSession keepalives).
	obj, err := w.sessMap.Get(sessionId)
	if err != nil {
		if errors.Is(err, olric.ErrKeyNotFound) {
			session := WSLoadBalancerSession{
				SessionId:   sessionId,
				NodeAddress: w.bindAddr,
			}
			err = w.sessMap.PutIfEx(sessionId, session, time.Second*30, olric.IfNotFound)
			if err != nil {
				if errors.Is(err, olric.ErrKeyFound) {
					// session is already acquired, re-run procedure
					return w.SelectWsServer(projectId, branchName, fileId)
				} else {
					return "", err
				}
			}
			return LocalServer, nil
		} else {
			return "", err
		}
	}
	// Session already exists: route to whichever node owns it.
	session := obj.(WSLoadBalancerSession)

	if session.NodeAddress == w.bindAddr {
		return LocalServer, nil
	} else {
		return session.NodeAddress, nil
	}
}
Status code: %d, body: %s", addr, err.Error(), statusCode, string(body)) + + serverConn.Close() + return + } + + serverConn.SetReadDeadline(time.Now().Add(PingTime * 2)) + serverConn.SetPongHandler(func(appData string) error { + serverConn.SetReadDeadline(time.Now().Add(PingTime * 2)) + return nil + }) + + utils.SafeAsync(func() { readAndForward(clientConn, serverConn) }) + + w.forwardedConnections.Store(addr, forwardedConnection{clientConn: clientConn, serverConn: serverConn, started: time.Now(), origSecWebSocketKey: origSecWebSocketKey}) + + log.Debugf("Forward connection to %s established", addr) + + defer func() { + serverConn.Close() + clientConn.Close() + serverConn = nil + clientConn = nil + w.forwardedConnections.Delete(addr) + }() + + readAndForward(serverConn, clientConn) +} + +func readAndForward(from *ws.Conn, to *ws.Conn) { + for { + mt, data, err := from.ReadMessage() + if err != nil { + break + } + err = to.WriteMessage(mt, data) + if err != nil { + break + } + } +} + +func (w *wsLoadBalancerImpl) handleBadSessions() { + sessions, err := w.ListSessions() + if err != nil { + log.Errorf("Failed to list sessions: %s", err.Error()) + return + } + + branchSession := map[string]string{} + fileSessions := map[string]string{} + + for _, sess := range sessions { + switch strings.Count(sess.SessionId, stringSeparator) { + case 1: + branchSession[sess.SessionId] = sess.NodeAddress + case 2: + fileSessions[sess.SessionId] = sess.NodeAddress + default: + log.Errorf("incorrect session id: %s", sess.SessionId) + } + } + + // Check bad sessions. + // Sessions are considered to be bad if they're on different nodes. E.x. Branch edit session on node1 and file edit session on node2. 
// getIdsFromUrl extracts the projectId, branchName and (optional) fileId
// from a websocket forwarding URL whose path has the shape
// .../projects/{projectId}/branches/{branchName}[/files/{fileId}][?...]
// (ids at slash-separated positions 6, 8 and 10). Segments are
// URL-decoded; a decode failure yields an empty string for that segment.
// FIX: segments are bounds-checked, so short or malformed URLs return
// empty strings instead of panicking with an index-out-of-range.
func getIdsFromUrl(urlStr string) (string, string, string) {
	pathPart := strings.Split(urlStr, "?")[0] // drop the query string
	parts := strings.Split(pathPart, "/")

	segment := func(i int) string {
		if i >= len(parts) {
			return ""
		}
		s, _ := url.QueryUnescape(parts[i]) // empty on decode error, as before
		return s
	}

	if strings.Contains(urlStr, "/files/") {
		return segment(6), segment(8), segment(10)
	}
	return segment(6), segment(8), ""
}
// sendPingPong periodically pings the client side of every forwarded
// websocket connection. If a ping cannot be written within two ping
// intervals, both ends of the forwarded pair are closed and the entry is
// dropped from forwardedConnections. Runs forever; started once from
// initWhenOlricReady.
func (w *wsLoadBalancerImpl) sendPingPong() {
	ticker := time.NewTicker(PingTime)
	for range ticker.C {
		w.forwardedConnections.Range(func(key, value interface{}) bool {
			addr := key.(string)
			fs := value.(forwardedConnection)
			// Ping each connection asynchronously so one slow/broken peer
			// does not delay pings to the others.
			utils.SafeAsync(func() {
				if err := fs.serverConn.WriteControl(ws.PingMessage, []byte{}, time.Now().Add(PingTime*2)); err != nil {
					fs.serverConn.Close()
					fs.clientConn.Close()
					w.forwardedConnections.Delete(addr)
				}
			})
			return true
		})
	}
}
+ +package service + +import ( + "fmt" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/repository" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + log "github.com/sirupsen/logrus" + "os" +) + +const ( + APIHUB_ADMIN_EMAIL = "APIHUB_ADMIN_EMAIL" + APIHUB_ADMIN_PASSWORD = "APIHUB_ADMIN_PASSWORD" +) + +type ZeroDayAdminService interface { + CreateZeroDayAdmin() error +} + +func NewZeroDayAdminService(userService UserService, roleService RoleService, repo repository.UserRepository) ZeroDayAdminService { + return &zeroDayAdminServiceImpl{ + userService: userService, + roleService: roleService, + repo: repo, + } +} + +type zeroDayAdminServiceImpl struct { + userService UserService + roleService RoleService + repo repository.UserRepository +} + +func (a zeroDayAdminServiceImpl) CreateZeroDayAdmin() error { + email := os.Getenv(APIHUB_ADMIN_EMAIL) + password := os.Getenv(APIHUB_ADMIN_PASSWORD) + if email == "" || password == "" { + return fmt.Errorf("CreateZeroDayAdmin: empty envs detected, admin will not be created") + } + + user, _ := a.userService.GetUserByEmail(email) + if user != nil { + _, err := a.userService.AuthenticateUser(email, password) + if err != nil { + passwordHash, err := createBcryptHashedPassword(password) + if err != nil { + return err + } + err = a.repo.UpdateUserPassword(user.Id, passwordHash) + if err != nil { + return err + } + log.Infof("CreateZeroDayAdmin: password is updated for sysadm user") + } else { + log.Infof("CreateZeroDayAdmin: sysadm user is already present") + } + } else { + user, err := a.userService.CreateInternalUser( + &view.InternalUser{ + Email: email, + Password: password, + }, + ) + if err != nil { + return err + } + + _, err = a.roleService.AddSystemAdministrator(user.Id) + if err != nil { + return err + } + log.Infof("CreateZeroDayAdmin: sysadm user with has been created") + } + return nil +} diff --git 
a/qubership-apihub-service/service/validation/ArchiveValidator.go b/qubership-apihub-service/service/validation/ArchiveValidator.go new file mode 100644 index 0000000..8a0deb6 --- /dev/null +++ b/qubership-apihub-service/service/validation/ArchiveValidator.go @@ -0,0 +1,220 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package validation + +import ( + "archive/zip" + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/archive" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +func ValidatePublishSources(srcArc *archive.SourcesArchive) error { + var fileIds []string + for _, configFile := range srcArc.BuildCfg.Files { + fileIds = append(fileIds, configFile.FileId) + } + + duplicates, missing, unknown := validateFiles(srcArc.FileHeaders, fileIds) + if len(duplicates) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileDuplicate, + Message: exception.FileDuplicateMsg, + Params: map[string]interface{}{"fileIds": duplicates, "configName": "build config"}, + } + } + + if len(missing) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileMissing, + Message: exception.FileMissingMsg, + Params: map[string]interface{}{"fileIds": missing, "location": "sources"}, + } + } + + if len(unknown) != 0 { + return &exception.CustomError{ + Status: 
http.StatusBadRequest, + Code: exception.FileRedundant, + Message: exception.FileRedundantMsg, + Params: map[string]interface{}{"files": unknown, "location": "sources"}, + } + } + + return nil +} + +func ValidatePublishBuildResult(buildArc *archive.BuildResultArchive) error { + var documentsFileIds, operationsFileIds, comparisonsFileIds []string + for _, configFile := range buildArc.PackageDocuments.Documents { + documentsFileIds = append(documentsFileIds, configFile.Filename) + } + for _, configFile := range buildArc.PackageOperations.Operations { + operationsFileIds = append(operationsFileIds, configFile.OperationId) + } + for _, configFile := range buildArc.PackageComparisons.Comparisons { + if configFile.ComparisonFileId != "" { + comparisonsFileIds = append(comparisonsFileIds, configFile.ComparisonFileId) + } + } + + var fullUnknownList []string + for f := range buildArc.UncategorizedFileHeaders { + fullUnknownList = append(fullUnknownList, f) + } + + duplicates, missing, unknown := validateFiles(buildArc.DocumentsHeaders, documentsFileIds) + if len(duplicates) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileDuplicate, + Message: exception.FileDuplicateMsg, + Params: map[string]interface{}{"fileIds": duplicates, "configName": archive.DocumentsFilePath + " config"}, + } + } + + if len(missing) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileMissing, + Message: exception.FileMissingMsg, + Params: map[string]interface{}{"fileIds": missing, "location": archive.DocumentsRootFolder + " folder in achive"}, + } + } + + for _, u := range unknown { + fullUnknownList = append(fullUnknownList, archive.DocumentsRootFolder+u) + } + + duplicates, missing, unknown = validateFiles(buildArc.OperationFileHeaders, operationsFileIds) + if len(duplicates) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileDuplicate, + Message: 
exception.FileDuplicateMsg, + Params: map[string]interface{}{"fileIds": duplicates, "configName": archive.OperationsFilePath + " config"}, + } + } + + if len(missing) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileMissing, + Message: exception.FileMissingMsg, + Params: map[string]interface{}{"fileIds": missing, "location": archive.OperationFilesRootFolder + " folder in achive"}, + } + } + + for _, u := range unknown { + fullUnknownList = append(fullUnknownList, archive.OperationFilesRootFolder+u) + } + + duplicates, missing, unknown = validateFiles(buildArc.ComparisonsFileHeaders, comparisonsFileIds) + if len(duplicates) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileDuplicate, + Message: exception.FileDuplicateMsg, + Params: map[string]interface{}{"fileIds": duplicates, "configName": archive.ComparisonsFilePath + " config"}, + } + } + + if len(missing) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileMissing, + Message: exception.FileMissingMsg, + Params: map[string]interface{}{"fileIds": missing, "location": archive.ComparisonsRootFolder + " folder in achive"}, + } + } + + for _, u := range unknown { + fullUnknownList = append(fullUnknownList, archive.ComparisonsRootFolder+u) + } + + if len(fullUnknownList) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FileRedundant, + Message: exception.FileRedundantMsg, + Params: map[string]interface{}{"files": fullUnknownList, "location": "build result archive"}, + } + } + return nil +} + +func validateFiles(zipFileHeaders map[string]*zip.File, configFileIds []string) ([]string, []string, []string) { + duplicates, configFileIdsMap := getDuplicateFiles(configFileIds) + if len(duplicates) != 0 { + return duplicates, nil, nil + } + missing := getMissingFiles(zipFileHeaders, configFileIdsMap) + if len(missing) != 0 { + return nil, missing, nil + } 
// getDuplicateFiles returns the file ids that occur more than once in
// configFileIds (each duplicate reported once) together with the set of
// distinct ids. FIX: duplicates are returned in first-occurrence order
// instead of nondeterministic map-iteration order, so the error messages
// built from them are stable.
func getDuplicateFiles(configFileIds []string) ([]string, map[string]struct{}) {
	configFileIdsMap := make(map[string]struct{}, len(configFileIds))
	reported := map[string]struct{}{}
	duplicates := make([]string, 0)
	for _, file := range configFileIds {
		if _, exists := configFileIdsMap[file]; exists {
			// Second or later occurrence: report it once.
			if _, seen := reported[file]; !seen {
				reported[file] = struct{}{}
				duplicates = append(duplicates, file)
			}
		} else {
			configFileIdsMap[file] = struct{}{}
		}
	}
	return duplicates, configFileIdsMap
}
// ValidatePackage runs the full validation pipeline for a package publish
// from a build result archive: per-section checks (info, documents,
// operations, comparisons, builder notifications) followed by
// cross-section rules tied to the package kind. Returns the first
// validation error encountered, or nil.
func (p publishedValidatorImpl) ValidatePackage(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error {
	if err := p.validatePackageInfo(buildArc, buildConfig); err != nil {
		return err
	}

	if err := p.validatePackageDocuments(buildArc, buildConfig); err != nil {
		return err
	}

	if err := p.validatePackageOperations(buildArc, buildConfig); err != nil {
		return err
	}

	if err := p.validatePackageComparisons(buildArc, buildConfig); err != nil {
		return err
	}

	if err := p.validatePackageBuilderNotifications(buildArc, buildConfig); err != nil {
		return err
	}

	// A publish must carry something: either documents or refs.
	if len(buildArc.PackageDocuments.Documents) == 0 && len(buildArc.PackageInfo.Refs) == 0 {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.EmptyDataForPublish,
			Message: exception.EmptyDataForPublishMsg,
		}
	}
	// Kind "group" aggregates other packages and must not contain documents.
	if len(buildArc.PackageDocuments.Documents) != 0 && buildArc.PackageInfo.Kind == entity.KIND_GROUP {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params:  map[string]interface{}{"file": "documents", "error": "cannot publish package with kind 'group' which contains documents"},
		}
	}

	// Kind "package" is the inverse: it must carry documents and no refs.
	if (len(buildArc.PackageInfo.Refs) > 0 || len(buildArc.PackageDocuments.Documents) == 0) &&
		buildArc.PackageInfo.Kind == entity.KIND_PACKAGE {
		return &exception.CustomError{
			Status:  http.StatusBadRequest,
			Code:    exception.InvalidPackagedFile,
			Message: exception.InvalidPackagedFileMsg,
			Params:  map[string]interface{}{"file": "refs", "error": "cannot publish package with kind 'package' with refs or without documents"},
		}
	}

	// Doesn't work for migration, maybe need some flag
	// if err := ValidateVersionName(info.Version); err != nil {
	// return err
	// }

	return nil
}
"expected": buildConfig.PackageId, + "actual": info.PackageId, + }, + } + } + if info.Version != buildConfig.Version { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "version", + "expected": buildConfig.Version, + "actual": info.Version, + }, + } + } + if info.Status != buildConfig.Status { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "status", + "expected": buildConfig.Status, + "actual": info.Status, + }, + } + } + if info.PreviousVersion != buildConfig.PreviousVersion { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "previousVersion", + "expected": buildConfig.PreviousVersion, + "actual": info.PreviousVersion, + }, + } + } + if info.PreviousVersionPackageId != buildConfig.PreviousVersionPackageId { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "previousVersionPackageId", + "expected": buildConfig.PreviousVersionPackageId, + "actual": info.PreviousVersionPackageId, + }, + } + } + + if info.BuildType != buildConfig.BuildType { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "buildType", + "expected": buildConfig.BuildType, + "actual": info.BuildType, + }, + } + } + if info.Format != buildConfig.Format { + if 
info.Format != "" || buildConfig.Format != string(view.JsonDocumentFormat) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PackageForBuildConfigDiscrepancy, + Message: exception.PackageForBuildConfigDiscrepancyMsg, + Params: map[string]interface{}{ + "param": "format", + "expected": buildConfig.Format, + "actual": info.Format, + }, + } + } + } + + return nil +} + +func (p publishedValidatorImpl) ValidateChanges(buildArc *archive.BuildResultArchive) error { + info := view.MakeChangelogInfoFileView(buildArc.PackageInfo) + comparisons := buildArc.PackageComparisons + if err := utils.ValidateObject(info); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "info", "error": err.Error()}, + } + } + if info.Revision == 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "info", "error": "version revision cannot be empty with changelog buildType"}, + } + } + if info.PreviousVersionRevision == 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "info", "error": "previous version revision cannot be empty with changelog buildType"}, + } + } + + if len(comparisons.Comparisons) == 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "comparisons", "error": "at least one comparison required for changelog buildType"}, + } + } + if err := utils.ValidateObject(comparisons); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: 
exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "comparisons", "error": err.Error()}, + } + } + for _, comparison := range comparisons.Comparisons { + if comparison.Version != "" { + if (buildArc.PackageInfo.Revision != comparison.Revision && comparison.Revision != 0) || + buildArc.PackageInfo.Version != comparison.Version || + buildArc.PackageInfo.PackageId != comparison.PackageId { + versionEnt, err := p.publishedRepo.GetVersionByRevision(comparison.PackageId, comparison.Version, comparison.Revision) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedVersionRevisionNotFound, + Message: exception.PublishedVersionRevisionNotFoundMsg, + Params: map[string]interface{}{"version": comparison.Version, "revision": comparison.Revision, "packageId": comparison.PackageId}, + } + } + } + } + if comparison.PreviousVersion != "" { + previousVersionEnt, err := p.publishedRepo.GetVersionByRevision(comparison.PreviousVersionPackageId, comparison.PreviousVersion, comparison.PreviousVersionRevision) + if err != nil { + return err + } + if previousVersionEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedVersionRevisionNotFound, + Message: exception.PublishedVersionRevisionNotFoundMsg, + Params: map[string]interface{}{"version": comparison.PreviousVersion, "revision": comparison.PreviousVersionRevision, "packageId": comparison.PreviousVersionPackageId}, + } + } + } + if comparison.FromCache { + comparisonId := view.MakeVersionComparisonId( + comparison.PackageId, + comparison.Version, + comparison.Revision, + comparison.PreviousVersionPackageId, + comparison.PreviousVersion, + comparison.PreviousVersionRevision) + comparisonEntity, err := p.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return err + } + if comparisonEntity == nil { + 
return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ComparisonNotFound, + Message: exception.ComparisonNotFoundMsg, + Params: map[string]interface{}{ + "comparisonId": comparisonId, + "packageId": comparison.PackageId, + "version": comparison.Version, + "revision": comparison.Revision, + "previousPackageId": comparison.PreviousVersionPackageId, + "previousVersion": comparison.PreviousVersion, + "previousRevision": comparison.PreviousVersionRevision, + }, + } + } + } + } + + return nil +} + +func (p publishedValidatorImpl) validatePackageInfo(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error { + if err := utils.ValidateObject(buildArc.PackageInfo); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "info", "error": err.Error()}, + } + } + info := buildArc.PackageInfo + if _, err := view.ParseVersionStatus(buildArc.PackageInfo.Status); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "info", "error": err.Error()}, + } + } + if info.PreviousVersionPackageId == info.PackageId { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPreviousVersionPackage, + Message: exception.InvalidPreviousVersionPackageMsg, + Params: map[string]interface{}{"previousVersionPackageId": info.PreviousVersionPackageId, "packageId": info.PackageId}, + } + } + if info.Version == info.PreviousVersion { + if info.PreviousVersionPackageId == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.VersionIsEqualToPreviousVersion, + Message: exception.VersionIsEqualToPreviousVersionMsg, + Params: map[string]interface{}{"version": info.Version, "previousVersion": 
info.PreviousVersion}, + } + } + } + for _, srcRef := range buildConfig.Refs { + refExists := false + for _, ref := range info.Refs { + if ref.RefId == srcRef.RefId && ref.Version == srcRef.Version { + refExists = true + break + } + } + if !refExists { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ReferenceMissingFromPackage, + Message: exception.ReferenceMissingFromPackageMsg, + Params: map[string]interface{}{"refId": srcRef.RefId, "version": srcRef.Version}, + } + } + } + if buildArc.PackageInfo.MigrationBuild { + ent, err := p.publishedRepo.GetVersion(buildArc.PackageInfo.PackageId, buildArc.PackageInfo.Version) + if err != nil { + return err + } + if ent == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": buildArc.PackageInfo.Version, "packageId": buildArc.PackageInfo.PackageId}, + } + } + } + return nil +} + +func (p publishedValidatorImpl) validatePackageDocuments(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error { + if err := utils.ValidateObject(buildArc.PackageDocuments); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "documents", "error": err.Error()}, + } + } + documents := buildArc.PackageDocuments + for _, document := range documents.Documents { + if view.InvalidDocumentType(document.Type) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidDocumentType, + Message: exception.InvalidDocumentTypeMsg, + Params: map[string]interface{}{"type": document.Type}, + } + } + /*if view.InvalidDocumentFormat(document.Format) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidDocumentFormat, + 
Message: exception.InvalidDocumentFormatMsg, + Params: map[string]interface{}{"format": document.Format}, + } + }*/ + } + + for _, srcFile := range buildConfig.Files { + if srcFile.Publish != nil && *srcFile.Publish { + documentExists := false + for _, document := range documents.Documents { + if document.FileId == srcFile.FileId { + documentExists = true + break + } + } + if !documentExists { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.DocumentMissingFromPackage, + Message: exception.DocumentMissingFromPackageMsg, + Params: map[string]interface{}{"fileId": srcFile.FileId}, + } + } + } + } + + return nil +} + +func (p publishedValidatorImpl) validatePackageOperations(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error { + if err := utils.ValidateObject(buildArc.PackageOperations); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "operations", "error": err.Error()}, + } + } + operations := buildArc.PackageOperations + for _, operation := range operations.Operations { + apiType, err := view.ParseApiType(operation.ApiType) + if err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, err.Error()), + }, + } + } + if !view.ValidApiAudience(operation.ApiAudience) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v has incorrect api_audience: %v", operation.OperationId, operation.ApiAudience), + 
}, + } + } + // Do not check api kind up to D's comment. Validation is not required for this field. + // I.e. any value from builder is acceptable. + + /*_, err = view.ParseApiKind(operation.ApiKind) + if err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, err.Error()), + }, + } + }*/ + + var operationMetadata entity.Metadata + operationMetadata = operation.Metadata + + switch apiType { + case view.RestApiType: + if operationMetadata.GetPath() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Path for operation is missing"), + }, + } + } + if operationMetadata.GetMethod() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Method for operation is missing"), + }, + } + } + for scope := range operation.SearchScopes { + if !view.ValidRestOperationScope(scope) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: search scope %v doesn't exist for %v api type", operation.OperationId, scope, apiType), + }, + } + } + } + case view.GraphqlApiType: + if 
operationMetadata.GetType() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Type for operation is missing"), + }, + } + } + if !view.ValidGraphQLOperationType(operationMetadata.GetType()) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidGraphQLOperationType, + Message: exception.InvalidGraphQLOperationTypeMsg, + Params: map[string]interface{}{"type": operationMetadata.GetType()}, + } + } + if operationMetadata.GetMethod() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Method for operation is missing"), + }, + } + } + for scope := range operation.SearchScopes { + if !view.ValidGraphqlOperationScope(scope) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: search scope %v doesn't exist for %v api type", operation.OperationId, scope, apiType), + }, + } + } + } + case view.ProtobufApiType: + if operationMetadata.GetType() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Type for operation is 
missing"), + }, + } + } + if !view.ValidProtobufOperationType(operationMetadata.GetType()) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidProtobufOperationType, + Message: exception.InvalidProtobufOperationTypeMsg, + Params: map[string]interface{}{"type": operationMetadata.GetType()}, + } + } + if operationMetadata.GetMethod() == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{ + "file": "operations", + "error": fmt.Sprintf("object with operationId = %v is incorrect: %v", operation.OperationId, "Metadata.Method for operation is missing"), + }, + } + } + //todo validate protobuf search scopes + default: + + } + } + + return nil +} + +func (p publishedValidatorImpl) validatePackageComparisons(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error { + if err := utils.ValidateObject(buildArc.PackageComparisons); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "comparisons", "error": err.Error()}, + } + } + comparisons := buildArc.PackageComparisons + info := buildArc.PackageInfo + if info.NoChangelog && len(comparisons.Comparisons) != 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ChangesAreNotEmpty, + Message: exception.ChangesAreNotEmptyMsg, + } + } + + if !info.NoChangelog && info.PreviousVersion != "" && len(comparisons.Comparisons) == 0 { + // need to check if previous version was deleted + prevPkgId := "" + if info.PreviousVersionPackageId != "" { + prevPkgId = info.PreviousVersionPackageId + } else { + prevPkgId = info.PackageId + } + pvEnt, err := p.publishedRepo.GetVersionIncludingDeleted(prevPkgId, info.PreviousVersion) + if err != nil { + return 
fmt.Errorf("failed to get previous version in validatePackage: %w", err) + } + if pvEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedPackageVersionNotFound, + Message: exception.PublishedPackageVersionNotFoundMsg, + Params: map[string]interface{}{"version": info.PreviousVersion, "packageId": prevPkgId}, + } + } + if pvEnt.DeletedAt != nil && !pvEnt.DeletedAt.IsZero() { + // previous version is deleted, so it's ok + } else { + // previous version is not deleted, and we don't have comparisons + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "comparisons", "error": "at least one comparison required for publishing package with previous version"}, + } + } + } + + excludedRefs := make(map[string]struct{}, 0) + for _, ref := range info.Refs { + if ref.Excluded { + excludedRefs[view.MakePackageVersionRefKey(ref.RefId, ref.Version)] = struct{}{} + } + } + for _, comparison := range comparisons.Comparisons { + if _, refExcluded := excludedRefs[view.MakePackageVersionRefKey(comparison.PackageId, view.MakeVersionRefKey(comparison.Version, comparison.Revision))]; refExcluded { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ExcludedComparisonReference, + Message: exception.ExcludedComparisonReferenceMsg, + Params: map[string]interface{}{"packageId": comparison.PackageId, "version": comparison.Version, "revision": comparison.Revision}, + } + } + if comparison.Version != "" { + if comparison.PackageId == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "packageId", "error": "packageId cannot be empty if version field is filled"}, + } + } + } + if comparison.Version == "" && 
comparison.PreviousVersion == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "version", "error": "version and previousVersion cannot both be empty"}, + } + } + if comparison.PreviousVersion != "" { + if comparison.PreviousVersionPackageId == "" { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "previousVersionPackageId", "error": "previousVersionPackageId cannot be empty if previousVersion field is filled"}, + } + } + if comparison.PreviousVersionRevision == 0 { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "previousVersionRevision", "error": "previousVersionRevision cannot be empty if previousVersion field is filled"}, + } + } + } + if strings.Contains(comparison.Version, "@") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "version", "error": "version cannot contain '@' symbol"}, + } + } + if strings.Contains(comparison.PreviousVersion, "@") { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidComparisonField, + Message: exception.InvalidComparisonFieldMsg, + Params: map[string]interface{}{"field": "previousVersion", "error": "previousVersion cannot contain '@' symbol"}, + } + } + if comparison.Version != "" { + if (buildArc.PackageInfo.Revision != comparison.Revision && comparison.Revision != 0) || + buildArc.PackageInfo.Version != comparison.Version || + buildArc.PackageInfo.PackageId != comparison.PackageId { + 
versionEnt, err := p.publishedRepo.GetVersionIncludingDeleted(comparison.PackageId, view.MakeVersionRefKey(comparison.Version, comparison.Revision)) + if err != nil { + return err + } + if versionEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedVersionRevisionNotFound, + Message: exception.PublishedVersionRevisionNotFoundMsg, + Params: map[string]interface{}{"version": comparison.Version, "revision": comparison.Revision, "packageId": comparison.PackageId}, + } + } + /*if versionEnt.DeletedAt != nil { + // TODO: delete this changelog + }*/ + } + } + if comparison.PreviousVersion != "" { + previousVersionEnt, err := p.publishedRepo.GetVersionIncludingDeleted(comparison.PreviousVersionPackageId, view.MakeVersionRefKey(comparison.PreviousVersion, comparison.PreviousVersionRevision)) + if err != nil { + return err + } + if previousVersionEnt == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.PublishedVersionRevisionNotFound, + Message: exception.PublishedVersionRevisionNotFoundMsg, + Params: map[string]interface{}{"version": comparison.PreviousVersion, "revision": comparison.PreviousVersionRevision, "packageId": comparison.PreviousVersionPackageId}, + } + } + /*if previousVersionEnt.DeletedAt != nil { + // TODO: delete this changelog + }*/ + } + if comparison.FromCache { + comparisonId := view.MakeVersionComparisonId( + comparison.PackageId, + comparison.Version, + comparison.Revision, + comparison.PreviousVersionPackageId, + comparison.PreviousVersion, + comparison.PreviousVersionRevision) + comparisonEntity, err := p.publishedRepo.GetVersionComparison(comparisonId) + if err != nil { + return err + } + if comparisonEntity == nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.ComparisonNotFound, + Message: exception.ComparisonNotFoundMsg, + Params: map[string]interface{}{ + "comparisonId": comparisonId, + "packageId": 
comparison.PackageId, + "version": comparison.Version, + "revision": comparison.Revision, + "previousPackageId": comparison.PreviousVersionPackageId, + "previousVersion": comparison.PreviousVersion, + "previousRevision": comparison.PreviousVersionRevision, + }, + } + } + } + // if comparison.ComparisonFileId != "" { + // if _, exists := comparisonsFileHeaders[comparison.ComparisonFileId]; !exists { + // return &exception.CustomError{ + // Status: http.StatusBadRequest, + // Code: exception.PackageArchivedFileNotFound, + // Message: exception.PackageArchivedFileNotFoundMsg, + // Params: map[string]interface{}{"file": comparison.ComparisonFileId, "folder": "comparisons/"}, + // } + // } + // } + } + return nil +} + +func (p publishedValidatorImpl) validatePackageBuilderNotifications(buildArc *archive.BuildResultArchive, buildConfig *view.BuildConfig) error { + if err := utils.ValidateObject(buildArc.BuilderNotifications); err != nil { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidPackagedFile, + Message: exception.InvalidPackagedFileMsg, + Params: map[string]interface{}{"file": "notifications", "error": err.Error()}, + } + } + return nil +} diff --git a/qubership-apihub-service/static/templates/index.html b/qubership-apihub-service/static/templates/index.html new file mode 100644 index 0000000..83e7187 --- /dev/null +++ b/qubership-apihub-service/static/templates/index.html @@ -0,0 +1,36 @@ + + + + + %s + + %s + + +
    + +
    +

    %s

    +

    Version: %s

    +
    +
    +
    + %s +
    +
    Table of contents
    +
      + %s +
    +
    + +
    + + diff --git a/qubership-apihub-service/static/templates/ls.html b/qubership-apihub-service/static/templates/ls.html new file mode 100644 index 0000000..81d2854 --- /dev/null +++ b/qubership-apihub-service/static/templates/ls.html @@ -0,0 +1,53 @@ + + + + + + + Legal Statement + + + +
    + +
    +

    %s

    +

    Version: %s

    +
    +
    + +
    + +
    +
    Legal Statement
    + +

    + qubership-apihub +

    +

    + qubership-apihub +

    +
    + +
    + + diff --git a/qubership-apihub-service/static/templates/page.html b/qubership-apihub-service/static/templates/page.html new file mode 100644 index 0000000..009841b --- /dev/null +++ b/qubership-apihub-service/static/templates/page.html @@ -0,0 +1,40 @@ + + + + + %s + + + + + +
    + +
    +

    %s

    +

    Version: %s

    +
    +
    + +
    + +
    + +
    + +
    + + diff --git a/qubership-apihub-service/static/templates/resources/ExcelExportTemplate.xlsx b/qubership-apihub-service/static/templates/resources/ExcelExportTemplate.xlsx new file mode 100644 index 0000000..76341fa Binary files /dev/null and b/qubership-apihub-service/static/templates/resources/ExcelExportTemplate.xlsx differ diff --git a/qubership-apihub-service/static/templates/resources/corporatelogo.png b/qubership-apihub-service/static/templates/resources/corporatelogo.png new file mode 100644 index 0000000..d6dce17 Binary files /dev/null and b/qubership-apihub-service/static/templates/resources/corporatelogo.png differ diff --git a/qubership-apihub-service/static/templates/resources/styles.css b/qubership-apihub-service/static/templates/resources/styles.css new file mode 100644 index 0000000..73ed25a --- /dev/null +++ b/qubership-apihub-service/static/templates/resources/styles.css @@ -0,0 +1,103 @@ +/** + * Copyright 2024-2025 NetCracker Technology Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +body { + background-color: rgb(245, 245, 250); + font-family: Arial, Helvetica, sans-serif; +} + +a { + text-decoration: none; + color:#000000; +} +a:hover { + color: #0063ce; +} + +.header { + display: flex; + justify-content: space-between; + align-items: center; + padding-right: 10px; +} + +.header__logo { + height: 100px; +} + +.header__info { + text-align: left; +} + +.page { + padding: 0 10px; +} + +.breadcrumbs { + padding: 10px 10px; +} + +.content { + padding: 10px; + border-radius: 10px; +} + +.footer { + display: flex; + justify-content: space-between; + color: #000000; + margin-top: 10px; + padding: 0 10px; +} + +.footer__statement { + +} + +.footer__legal { + margin-left: auto; + text-align: right; + font-size: 10pt; +} + +.card { + background-color: rgb(255, 255, 255); + box-shadow: rgb(4 11 23 / 4%) 0px 1px 5px, rgb(0 0 0 / 6%) 0px 5px 25px, rgb(0 0 0 / 9%) 0px 0px 2px; +} + +.card__title { + font-size: 18pt; +} + +.card__text { + padding: 5px 0; + font-size: 12pt; +} + +.card__list { + list-style-type: circle; +} + +.card__list li{ + font-size: 13pt; +} + +table, th, td { + border-collapse: collapse; + border: 1px solid hsla(210,18%,87%,1); + padding: 5px; +} diff --git a/qubership-apihub-service/static/templates/scripts/apispec-view.js b/qubership-apihub-service/static/templates/scripts/apispec-view.js new file mode 100644 index 0000000..ba1374b --- /dev/null +++ b/qubership-apihub-service/static/templates/scripts/apispec-view.js @@ -0,0 +1,2 @@ +//placeholder +//todo: need to pack real libabry to here \ No newline at end of file diff --git a/qubership-apihub-service/static/templates/scripts/markdown-it.min.js b/qubership-apihub-service/static/templates/scripts/markdown-it.min.js new file mode 100644 index 0000000..e45f77c --- /dev/null +++ b/qubership-apihub-service/static/templates/scripts/markdown-it.min.js @@ -0,0 +1,19 @@ +/** + * Copyright 2024-2025 NetCracker Technology Corporation + * + * Licensed under the Apache License, Version 
2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! markdown-it 13.0.1 https://github.com/markdown-it/markdown-it @license MIT */ +!function(e,r){"object"==typeof exports&&"undefined"!=typeof module?module.exports=r():"function"==typeof define&&define.amd?define(r):(e="undefined"!=typeof globalThis?globalThis:e||self).markdownit=r()}(this,(function(){"use strict";function e(e){if(e.__esModule)return e;var r=Object.defineProperty({},"__esModule",{value:!0});return Object.keys(e).forEach((function(t){var n=Object.getOwnPropertyDescriptor(e,t);Object.defineProperty(r,t,n.get?n:{enumerable:!0,get:function(){return e[t]}})})),r}var 
r={Aacute:"\xc1",aacute:"\xe1",Abreve:"\u0102",abreve:"\u0103",ac:"\u223e",acd:"\u223f",acE:"\u223e\u0333",Acirc:"\xc2",acirc:"\xe2",acute:"\xb4",Acy:"\u0410",acy:"\u0430",AElig:"\xc6",aelig:"\xe6",af:"\u2061",Afr:"\ud835\udd04",afr:"\ud835\udd1e",Agrave:"\xc0",agrave:"\xe0",alefsym:"\u2135",aleph:"\u2135",Alpha:"\u0391",alpha:"\u03b1",Amacr:"\u0100",amacr:"\u0101",amalg:"\u2a3f",amp:"&",AMP:"&",andand:"\u2a55",And:"\u2a53",and:"\u2227",andd:"\u2a5c",andslope:"\u2a58",andv:"\u2a5a",ang:"\u2220",ange:"\u29a4",angle:"\u2220",angmsdaa:"\u29a8",angmsdab:"\u29a9",angmsdac:"\u29aa",angmsdad:"\u29ab",angmsdae:"\u29ac",angmsdaf:"\u29ad",angmsdag:"\u29ae",angmsdah:"\u29af",angmsd:"\u2221",angrt:"\u221f",angrtvb:"\u22be",angrtvbd:"\u299d",angsph:"\u2222",angst:"\xc5",angzarr:"\u237c",Aogon:"\u0104",aogon:"\u0105",Aopf:"\ud835\udd38",aopf:"\ud835\udd52",apacir:"\u2a6f",ap:"\u2248",apE:"\u2a70",ape:"\u224a",apid:"\u224b",apos:"'",ApplyFunction:"\u2061",approx:"\u2248",approxeq:"\u224a",Aring:"\xc5",aring:"\xe5",Ascr:"\ud835\udc9c",ascr:"\ud835\udcb6",Assign:"\u2254",ast:"*",asymp:"\u2248",asympeq:"\u224d",Atilde:"\xc3",atilde:"\xe3",Auml:"\xc4",auml:"\xe4",awconint:"\u2233",awint:"\u2a11",backcong:"\u224c",backepsilon:"\u03f6",backprime:"\u2035",backsim:"\u223d",backsimeq:"\u22cd",Backslash:"\u2216",Barv:"\u2ae7",barvee:"\u22bd",barwed:"\u2305",Barwed:"\u2306",barwedge:"\u2305",bbrk:"\u23b5",bbrktbrk:"\u23b6",bcong:"\u224c",Bcy:"\u0411",bcy:"\u0431",bdquo:"\u201e",becaus:"\u2235",because:"\u2235",Because:"\u2235",bemptyv:"\u29b0",bepsi:"\u03f6",bernou:"\u212c",Bernoullis:"\u212c",Beta:"\u0392",beta:"\u03b2",beth:"\u2136",between:"\u226c",Bfr:"\ud835\udd05",bfr:"\ud835\udd1f",bigcap:"\u22c2",bigcirc:"\u25ef",bigcup:"\u22c3",bigodot:"\u2a00",bigoplus:"\u2a01",bigotimes:"\u2a02",bigsqcup:"\u2a06",bigstar:"\u2605",bigtriangledown:"\u25bd",bigtriangleup:"\u25b3",biguplus:"\u2a04",bigvee:"\u22c1",bigwedge:"\u22c0",bkarow:"\u290d",blacklozenge:"\u29eb",blacksquare:"\u25aa",blacktriang
le:"\u25b4",blacktriangledown:"\u25be",blacktriangleleft:"\u25c2",blacktriangleright:"\u25b8",blank:"\u2423",blk12:"\u2592",blk14:"\u2591",blk34:"\u2593",block:"\u2588",bne:"=\u20e5",bnequiv:"\u2261\u20e5",bNot:"\u2aed",bnot:"\u2310",Bopf:"\ud835\udd39",bopf:"\ud835\udd53",bot:"\u22a5",bottom:"\u22a5",bowtie:"\u22c8",boxbox:"\u29c9",boxdl:"\u2510",boxdL:"\u2555",boxDl:"\u2556",boxDL:"\u2557",boxdr:"\u250c",boxdR:"\u2552",boxDr:"\u2553",boxDR:"\u2554",boxh:"\u2500",boxH:"\u2550",boxhd:"\u252c",boxHd:"\u2564",boxhD:"\u2565",boxHD:"\u2566",boxhu:"\u2534",boxHu:"\u2567",boxhU:"\u2568",boxHU:"\u2569",boxminus:"\u229f",boxplus:"\u229e",boxtimes:"\u22a0",boxul:"\u2518",boxuL:"\u255b",boxUl:"\u255c",boxUL:"\u255d",boxur:"\u2514",boxuR:"\u2558",boxUr:"\u2559",boxUR:"\u255a",boxv:"\u2502",boxV:"\u2551",boxvh:"\u253c",boxvH:"\u256a",boxVh:"\u256b",boxVH:"\u256c",boxvl:"\u2524",boxvL:"\u2561",boxVl:"\u2562",boxVL:"\u2563",boxvr:"\u251c",boxvR:"\u255e",boxVr:"\u255f",boxVR:"\u2560",bprime:"\u2035",breve:"\u02d8",Breve:"\u02d8",brvbar:"\xa6",bscr:"\ud835\udcb7",Bscr:"\u212c",bsemi:"\u204f",bsim:"\u223d",bsime:"\u22cd",bsolb:"\u29c5",bsol:"\\",bsolhsub:"\u27c8",bull:"\u2022",bullet:"\u2022",bump:"\u224e",bumpE:"\u2aae",bumpe:"\u224f",Bumpeq:"\u224e",bumpeq:"\u224f",Cacute:"\u0106",cacute:"\u0107",capand:"\u2a44",capbrcup:"\u2a49",capcap:"\u2a4b",cap:"\u2229",Cap:"\u22d2",capcup:"\u2a47",capdot:"\u2a40",CapitalDifferentialD:"\u2145",caps:"\u2229\ufe00",caret:"\u2041",caron:"\u02c7",Cayleys:"\u212d",ccaps:"\u2a4d",Ccaron:"\u010c",ccaron:"\u010d",Ccedil:"\xc7",ccedil:"\xe7",Ccirc:"\u0108",ccirc:"\u0109",Cconint:"\u2230",ccups:"\u2a4c",ccupssm:"\u2a50",Cdot:"\u010a",cdot:"\u010b",cedil:"\xb8",Cedilla:"\xb8",cemptyv:"\u29b2",cent:"\xa2",centerdot:"\xb7",CenterDot:"\xb7",cfr:"\ud835\udd20",Cfr:"\u212d",CHcy:"\u0427",chcy:"\u0447",check:"\u2713",checkmark:"\u2713",Chi:"\u03a7",chi:"\u03c7",circ:"\u02c6",circeq:"\u2257",circlearrowleft:"\u21ba",circlearrowright:"\u21bb",circledast:"\u229b
",circledcirc:"\u229a",circleddash:"\u229d",CircleDot:"\u2299",circledR:"\xae",circledS:"\u24c8",CircleMinus:"\u2296",CirclePlus:"\u2295",CircleTimes:"\u2297",cir:"\u25cb",cirE:"\u29c3",cire:"\u2257",cirfnint:"\u2a10",cirmid:"\u2aef",cirscir:"\u29c2",ClockwiseContourIntegral:"\u2232",CloseCurlyDoubleQuote:"\u201d",CloseCurlyQuote:"\u2019",clubs:"\u2663",clubsuit:"\u2663",colon:":",Colon:"\u2237",Colone:"\u2a74",colone:"\u2254",coloneq:"\u2254",comma:",",commat:"@",comp:"\u2201",compfn:"\u2218",complement:"\u2201",complexes:"\u2102",cong:"\u2245",congdot:"\u2a6d",Congruent:"\u2261",conint:"\u222e",Conint:"\u222f",ContourIntegral:"\u222e",copf:"\ud835\udd54",Copf:"\u2102",coprod:"\u2210",Coproduct:"\u2210",copy:"\xa9",COPY:"\xa9",copysr:"\u2117",CounterClockwiseContourIntegral:"\u2233",crarr:"\u21b5",cross:"\u2717",Cross:"\u2a2f",Cscr:"\ud835\udc9e",cscr:"\ud835\udcb8",csub:"\u2acf",csube:"\u2ad1",csup:"\u2ad0",csupe:"\u2ad2",ctdot:"\u22ef",cudarrl:"\u2938",cudarrr:"\u2935",cuepr:"\u22de",cuesc:"\u22df",cularr:"\u21b6",cularrp:"\u293d",cupbrcap:"\u2a48",cupcap:"\u2a46",CupCap:"\u224d",cup:"\u222a",Cup:"\u22d3",cupcup:"\u2a4a",cupdot:"\u228d",cupor:"\u2a45",cups:"\u222a\ufe00",curarr:"\u21b7",curarrm:"\u293c",curlyeqprec:"\u22de",curlyeqsucc:"\u22df",curlyvee:"\u22ce",curlywedge:"\u22cf",curren:"\xa4",curvearrowleft:"\u21b6",curvearrowright:"\u21b7",cuvee:"\u22ce",cuwed:"\u22cf",cwconint:"\u2232",cwint:"\u2231",cylcty:"\u232d",dagger:"\u2020",Dagger:"\u2021",daleth:"\u2138",darr:"\u2193",Darr:"\u21a1",dArr:"\u21d3",dash:"\u2010",Dashv:"\u2ae4",dashv:"\u22a3",dbkarow:"\u290f",dblac:"\u02dd",Dcaron:"\u010e",dcaron:"\u010f",Dcy:"\u0414",dcy:"\u0434",ddagger:"\u2021",ddarr:"\u21ca",DD:"\u2145",dd:"\u2146",DDotrahd:"\u2911",ddotseq:"\u2a77",deg:"\xb0",Del:"\u2207",Delta:"\u0394",delta:"\u03b4",demptyv:"\u29b1",dfisht:"\u297f",Dfr:"\ud835\udd07",dfr:"\ud835\udd21",dHar:"\u2965",dharl:"\u21c3",dharr:"\u21c2",DiacriticalAcute:"\xb4",DiacriticalDot:"\u02d9",DiacriticalDoubleAcu
te:"\u02dd",DiacriticalGrave:"`",DiacriticalTilde:"\u02dc",diam:"\u22c4",diamond:"\u22c4",Diamond:"\u22c4",diamondsuit:"\u2666",diams:"\u2666",die:"\xa8",DifferentialD:"\u2146",digamma:"\u03dd",disin:"\u22f2",div:"\xf7",divide:"\xf7",divideontimes:"\u22c7",divonx:"\u22c7",DJcy:"\u0402",djcy:"\u0452",dlcorn:"\u231e",dlcrop:"\u230d",dollar:"$",Dopf:"\ud835\udd3b",dopf:"\ud835\udd55",Dot:"\xa8",dot:"\u02d9",DotDot:"\u20dc",doteq:"\u2250",doteqdot:"\u2251",DotEqual:"\u2250",dotminus:"\u2238",dotplus:"\u2214",dotsquare:"\u22a1",doublebarwedge:"\u2306",DoubleContourIntegral:"\u222f",DoubleDot:"\xa8",DoubleDownArrow:"\u21d3",DoubleLeftArrow:"\u21d0",DoubleLeftRightArrow:"\u21d4",DoubleLeftTee:"\u2ae4",DoubleLongLeftArrow:"\u27f8",DoubleLongLeftRightArrow:"\u27fa",DoubleLongRightArrow:"\u27f9",DoubleRightArrow:"\u21d2",DoubleRightTee:"\u22a8",DoubleUpArrow:"\u21d1",DoubleUpDownArrow:"\u21d5",DoubleVerticalBar:"\u2225",DownArrowBar:"\u2913",downarrow:"\u2193",DownArrow:"\u2193",Downarrow:"\u21d3",DownArrowUpArrow:"\u21f5",DownBreve:"\u0311",downdownarrows:"\u21ca",downharpoonleft:"\u21c3",downharpoonright:"\u21c2",DownLeftRightVector:"\u2950",DownLeftTeeVector:"\u295e",DownLeftVectorBar:"\u2956",DownLeftVector:"\u21bd",DownRightTeeVector:"\u295f",DownRightVectorBar:"\u2957",DownRightVector:"\u21c1",DownTeeArrow:"\u21a7",DownTee:"\u22a4",drbkarow:"\u2910",drcorn:"\u231f",drcrop:"\u230c",Dscr:"\ud835\udc9f",dscr:"\ud835\udcb9",DScy:"\u0405",dscy:"\u0455",dsol:"\u29f6",Dstrok:"\u0110",dstrok:"\u0111",dtdot:"\u22f1",dtri:"\u25bf",dtrif:"\u25be",duarr:"\u21f5",duhar:"\u296f",dwangle:"\u29a6",DZcy:"\u040f",dzcy:"\u045f",dzigrarr:"\u27ff",Eacute:"\xc9",eacute:"\xe9",easter:"\u2a6e",Ecaron:"\u011a",ecaron:"\u011b",Ecirc:"\xca",ecirc:"\xea",ecir:"\u2256",ecolon:"\u2255",Ecy:"\u042d",ecy:"\u044d",eDDot:"\u2a77",Edot:"\u0116",edot:"\u0117",eDot:"\u2251",ee:"\u2147",efDot:"\u2252",Efr:"\ud835\udd08",efr:"\ud835\udd22",eg:"\u2a9a",Egrave:"\xc8",egrave:"\xe8",egs:"\u2a96",egsdot:"\u2a98",
el:"\u2a99",Element:"\u2208",elinters:"\u23e7",ell:"\u2113",els:"\u2a95",elsdot:"\u2a97",Emacr:"\u0112",emacr:"\u0113",empty:"\u2205",emptyset:"\u2205",EmptySmallSquare:"\u25fb",emptyv:"\u2205",EmptyVerySmallSquare:"\u25ab",emsp13:"\u2004",emsp14:"\u2005",emsp:"\u2003",ENG:"\u014a",eng:"\u014b",ensp:"\u2002",Eogon:"\u0118",eogon:"\u0119",Eopf:"\ud835\udd3c",eopf:"\ud835\udd56",epar:"\u22d5",eparsl:"\u29e3",eplus:"\u2a71",epsi:"\u03b5",Epsilon:"\u0395",epsilon:"\u03b5",epsiv:"\u03f5",eqcirc:"\u2256",eqcolon:"\u2255",eqsim:"\u2242",eqslantgtr:"\u2a96",eqslantless:"\u2a95",Equal:"\u2a75",equals:"=",EqualTilde:"\u2242",equest:"\u225f",Equilibrium:"\u21cc",equiv:"\u2261",equivDD:"\u2a78",eqvparsl:"\u29e5",erarr:"\u2971",erDot:"\u2253",escr:"\u212f",Escr:"\u2130",esdot:"\u2250",Esim:"\u2a73",esim:"\u2242",Eta:"\u0397",eta:"\u03b7",ETH:"\xd0",eth:"\xf0",Euml:"\xcb",euml:"\xeb",euro:"\u20ac",excl:"!",exist:"\u2203",Exists:"\u2203",expectation:"\u2130",exponentiale:"\u2147",ExponentialE:"\u2147",fallingdotseq:"\u2252",Fcy:"\u0424",fcy:"\u0444",female:"\u2640",ffilig:"\ufb03",fflig:"\ufb00",ffllig:"\ufb04",Ffr:"\ud835\udd09",ffr:"\ud835\udd23",filig:"\ufb01",FilledSmallSquare:"\u25fc",FilledVerySmallSquare:"\u25aa",fjlig:"fj",flat:"\u266d",fllig:"\ufb02",fltns:"\u25b1",fnof:"\u0192",Fopf:"\ud835\udd3d",fopf:"\ud835\udd57",forall:"\u2200",ForAll:"\u2200",fork:"\u22d4",forkv:"\u2ad9",Fouriertrf:"\u2131",fpartint:"\u2a0d",frac12:"\xbd",frac13:"\u2153",frac14:"\xbc",frac15:"\u2155",frac16:"\u2159",frac18:"\u215b",frac23:"\u2154",frac25:"\u2156",frac34:"\xbe",frac35:"\u2157",frac38:"\u215c",frac45:"\u2158",frac56:"\u215a",frac58:"\u215d",frac78:"\u215e",frasl:"\u2044",frown:"\u2322",fscr:"\ud835\udcbb",Fscr:"\u2131",gacute:"\u01f5",Gamma:"\u0393",gamma:"\u03b3",Gammad:"\u03dc",gammad:"\u03dd",gap:"\u2a86",Gbreve:"\u011e",gbreve:"\u011f",Gcedil:"\u0122",Gcirc:"\u011c",gcirc:"\u011d",Gcy:"\u0413",gcy:"\u0433",Gdot:"\u0120",gdot:"\u0121",ge:"\u2265",gE:"\u2267",gEl:"\u2a8c",gel:"\u22
db",geq:"\u2265",geqq:"\u2267",geqslant:"\u2a7e",gescc:"\u2aa9",ges:"\u2a7e",gesdot:"\u2a80",gesdoto:"\u2a82",gesdotol:"\u2a84",gesl:"\u22db\ufe00",gesles:"\u2a94",Gfr:"\ud835\udd0a",gfr:"\ud835\udd24",gg:"\u226b",Gg:"\u22d9",ggg:"\u22d9",gimel:"\u2137",GJcy:"\u0403",gjcy:"\u0453",gla:"\u2aa5",gl:"\u2277",glE:"\u2a92",glj:"\u2aa4",gnap:"\u2a8a",gnapprox:"\u2a8a",gne:"\u2a88",gnE:"\u2269",gneq:"\u2a88",gneqq:"\u2269",gnsim:"\u22e7",Gopf:"\ud835\udd3e",gopf:"\ud835\udd58",grave:"`",GreaterEqual:"\u2265",GreaterEqualLess:"\u22db",GreaterFullEqual:"\u2267",GreaterGreater:"\u2aa2",GreaterLess:"\u2277",GreaterSlantEqual:"\u2a7e",GreaterTilde:"\u2273",Gscr:"\ud835\udca2",gscr:"\u210a",gsim:"\u2273",gsime:"\u2a8e",gsiml:"\u2a90",gtcc:"\u2aa7",gtcir:"\u2a7a",gt:">",GT:">",Gt:"\u226b",gtdot:"\u22d7",gtlPar:"\u2995",gtquest:"\u2a7c",gtrapprox:"\u2a86",gtrarr:"\u2978",gtrdot:"\u22d7",gtreqless:"\u22db",gtreqqless:"\u2a8c",gtrless:"\u2277",gtrsim:"\u2273",gvertneqq:"\u2269\ufe00",gvnE:"\u2269\ufe00",Hacek:"\u02c7",hairsp:"\u200a",half:"\xbd",hamilt:"\u210b",HARDcy:"\u042a",hardcy:"\u044a",harrcir:"\u2948",harr:"\u2194",hArr:"\u21d4",harrw:"\u21ad",Hat:"^",hbar:"\u210f",Hcirc:"\u0124",hcirc:"\u0125",hearts:"\u2665",heartsuit:"\u2665",hellip:"\u2026",hercon:"\u22b9",hfr:"\ud835\udd25",Hfr:"\u210c",HilbertSpace:"\u210b",hksearow:"\u2925",hkswarow:"\u2926",hoarr:"\u21ff",homtht:"\u223b",hookleftarrow:"\u21a9",hookrightarrow:"\u21aa",hopf:"\ud835\udd59",Hopf:"\u210d",horbar:"\u2015",HorizontalLine:"\u2500",hscr:"\ud835\udcbd",Hscr:"\u210b",hslash:"\u210f",Hstrok:"\u0126",hstrok:"\u0127",HumpDownHump:"\u224e",HumpEqual:"\u224f",hybull:"\u2043",hyphen:"\u2010",Iacute:"\xcd",iacute:"\xed",ic:"\u2063",Icirc:"\xce",icirc:"\xee",Icy:"\u0418",icy:"\u0438",Idot:"\u0130",IEcy:"\u0415",iecy:"\u0435",iexcl:"\xa1",iff:"\u21d4",ifr:"\ud835\udd26",Ifr:"\u2111",Igrave:"\xcc",igrave:"\xec",ii:"\u2148",iiiint:"\u2a0c",iiint:"\u222d",iinfin:"\u29dc",iiota:"\u2129",IJlig:"\u0132",ijlig:"\u0133",Imacr:"
\u012a",imacr:"\u012b",image:"\u2111",ImaginaryI:"\u2148",imagline:"\u2110",imagpart:"\u2111",imath:"\u0131",Im:"\u2111",imof:"\u22b7",imped:"\u01b5",Implies:"\u21d2",incare:"\u2105",in:"\u2208",infin:"\u221e",infintie:"\u29dd",inodot:"\u0131",intcal:"\u22ba",int:"\u222b",Int:"\u222c",integers:"\u2124",Integral:"\u222b",intercal:"\u22ba",Intersection:"\u22c2",intlarhk:"\u2a17",intprod:"\u2a3c",InvisibleComma:"\u2063",InvisibleTimes:"\u2062",IOcy:"\u0401",iocy:"\u0451",Iogon:"\u012e",iogon:"\u012f",Iopf:"\ud835\udd40",iopf:"\ud835\udd5a",Iota:"\u0399",iota:"\u03b9",iprod:"\u2a3c",iquest:"\xbf",iscr:"\ud835\udcbe",Iscr:"\u2110",isin:"\u2208",isindot:"\u22f5",isinE:"\u22f9",isins:"\u22f4",isinsv:"\u22f3",isinv:"\u2208",it:"\u2062",Itilde:"\u0128",itilde:"\u0129",Iukcy:"\u0406",iukcy:"\u0456",Iuml:"\xcf",iuml:"\xef",Jcirc:"\u0134",jcirc:"\u0135",Jcy:"\u0419",jcy:"\u0439",Jfr:"\ud835\udd0d",jfr:"\ud835\udd27",jmath:"\u0237",Jopf:"\ud835\udd41",jopf:"\ud835\udd5b",Jscr:"\ud835\udca5",jscr:"\ud835\udcbf",Jsercy:"\u0408",jsercy:"\u0458",Jukcy:"\u0404",jukcy:"\u0454",Kappa:"\u039a",kappa:"\u03ba",kappav:"\u03f0",Kcedil:"\u0136",kcedil:"\u0137",Kcy:"\u041a",kcy:"\u043a",Kfr:"\ud835\udd0e",kfr:"\ud835\udd28",kgreen:"\u0138",KHcy:"\u0425",khcy:"\u0445",KJcy:"\u040c",kjcy:"\u045c",Kopf:"\ud835\udd42",kopf:"\ud835\udd5c",Kscr:"\ud835\udca6",kscr:"\ud835\udcc0",lAarr:"\u21da",Lacute:"\u0139",lacute:"\u013a",laemptyv:"\u29b4",lagran:"\u2112",Lambda:"\u039b",lambda:"\u03bb",lang:"\u27e8",Lang:"\u27ea",langd:"\u2991",langle:"\u27e8",lap:"\u2a85",Laplacetrf:"\u2112",laquo:"\xab",larrb:"\u21e4",larrbfs:"\u291f",larr:"\u2190",Larr:"\u219e",lArr:"\u21d0",larrfs:"\u291d",larrhk:"\u21a9",larrlp:"\u21ab",larrpl:"\u2939",larrsim:"\u2973",larrtl:"\u21a2",latail:"\u2919",lAtail:"\u291b",lat:"\u2aab",late:"\u2aad",lates:"\u2aad\ufe00",lbarr:"\u290c",lBarr:"\u290e",lbbrk:"\u2772",lbrace:"{",lbrack:"[",lbrke:"\u298b",lbrksld:"\u298f",lbrkslu:"\u298d",Lcaron:"\u013d",lcaron:"\u013e",Lcedil:"\u013b
",lcedil:"\u013c",lceil:"\u2308",lcub:"{",Lcy:"\u041b",lcy:"\u043b",ldca:"\u2936",ldquo:"\u201c",ldquor:"\u201e",ldrdhar:"\u2967",ldrushar:"\u294b",ldsh:"\u21b2",le:"\u2264",lE:"\u2266",LeftAngleBracket:"\u27e8",LeftArrowBar:"\u21e4",leftarrow:"\u2190",LeftArrow:"\u2190",Leftarrow:"\u21d0",LeftArrowRightArrow:"\u21c6",leftarrowtail:"\u21a2",LeftCeiling:"\u2308",LeftDoubleBracket:"\u27e6",LeftDownTeeVector:"\u2961",LeftDownVectorBar:"\u2959",LeftDownVector:"\u21c3",LeftFloor:"\u230a",leftharpoondown:"\u21bd",leftharpoonup:"\u21bc",leftleftarrows:"\u21c7",leftrightarrow:"\u2194",LeftRightArrow:"\u2194",Leftrightarrow:"\u21d4",leftrightarrows:"\u21c6",leftrightharpoons:"\u21cb",leftrightsquigarrow:"\u21ad",LeftRightVector:"\u294e",LeftTeeArrow:"\u21a4",LeftTee:"\u22a3",LeftTeeVector:"\u295a",leftthreetimes:"\u22cb",LeftTriangleBar:"\u29cf",LeftTriangle:"\u22b2",LeftTriangleEqual:"\u22b4",LeftUpDownVector:"\u2951",LeftUpTeeVector:"\u2960",LeftUpVectorBar:"\u2958",LeftUpVector:"\u21bf",LeftVectorBar:"\u2952",LeftVector:"\u21bc",lEg:"\u2a8b",leg:"\u22da",leq:"\u2264",leqq:"\u2266",leqslant:"\u2a7d",lescc:"\u2aa8",les:"\u2a7d",lesdot:"\u2a7f",lesdoto:"\u2a81",lesdotor:"\u2a83",lesg:"\u22da\ufe00",lesges:"\u2a93",lessapprox:"\u2a85",lessdot:"\u22d6",lesseqgtr:"\u22da",lesseqqgtr:"\u2a8b",LessEqualGreater:"\u22da",LessFullEqual:"\u2266",LessGreater:"\u2276",lessgtr:"\u2276",LessLess:"\u2aa1",lesssim:"\u2272",LessSlantEqual:"\u2a7d",LessTilde:"\u2272",lfisht:"\u297c",lfloor:"\u230a",Lfr:"\ud835\udd0f",lfr:"\ud835\udd29",lg:"\u2276",lgE:"\u2a91",lHar:"\u2962",lhard:"\u21bd",lharu:"\u21bc",lharul:"\u296a",lhblk:"\u2584",LJcy:"\u0409",ljcy:"\u0459",llarr:"\u21c7",ll:"\u226a",Ll:"\u22d8",llcorner:"\u231e",Lleftarrow:"\u21da",llhard:"\u296b",lltri:"\u25fa",Lmidot:"\u013f",lmidot:"\u0140",lmoustache:"\u23b0",lmoust:"\u23b0",lnap:"\u2a89",lnapprox:"\u2a89",lne:"\u2a87",lnE:"\u2268",lneq:"\u2a87",lneqq:"\u2268",lnsim:"\u22e6",loang:"\u27ec",loarr:"\u21fd",lobrk:"\u27e6",longleftarrow
:"\u27f5",LongLeftArrow:"\u27f5",Longleftarrow:"\u27f8",longleftrightarrow:"\u27f7",LongLeftRightArrow:"\u27f7",Longleftrightarrow:"\u27fa",longmapsto:"\u27fc",longrightarrow:"\u27f6",LongRightArrow:"\u27f6",Longrightarrow:"\u27f9",looparrowleft:"\u21ab",looparrowright:"\u21ac",lopar:"\u2985",Lopf:"\ud835\udd43",lopf:"\ud835\udd5d",loplus:"\u2a2d",lotimes:"\u2a34",lowast:"\u2217",lowbar:"_",LowerLeftArrow:"\u2199",LowerRightArrow:"\u2198",loz:"\u25ca",lozenge:"\u25ca",lozf:"\u29eb",lpar:"(",lparlt:"\u2993",lrarr:"\u21c6",lrcorner:"\u231f",lrhar:"\u21cb",lrhard:"\u296d",lrm:"\u200e",lrtri:"\u22bf",lsaquo:"\u2039",lscr:"\ud835\udcc1",Lscr:"\u2112",lsh:"\u21b0",Lsh:"\u21b0",lsim:"\u2272",lsime:"\u2a8d",lsimg:"\u2a8f",lsqb:"[",lsquo:"\u2018",lsquor:"\u201a",Lstrok:"\u0141",lstrok:"\u0142",ltcc:"\u2aa6",ltcir:"\u2a79",lt:"<",LT:"<",Lt:"\u226a",ltdot:"\u22d6",lthree:"\u22cb",ltimes:"\u22c9",ltlarr:"\u2976",ltquest:"\u2a7b",ltri:"\u25c3",ltrie:"\u22b4",ltrif:"\u25c2",ltrPar:"\u2996",lurdshar:"\u294a",luruhar:"\u2966",lvertneqq:"\u2268\ufe00",lvnE:"\u2268\ufe00",macr:"\xaf",male:"\u2642",malt:"\u2720",maltese:"\u2720",Map:"\u2905",map:"\u21a6",mapsto:"\u21a6",mapstodown:"\u21a7",mapstoleft:"\u21a4",mapstoup:"\u21a5",marker:"\u25ae",mcomma:"\u2a29",Mcy:"\u041c",mcy:"\u043c",mdash:"\u2014",mDDot:"\u223a",measuredangle:"\u2221",MediumSpace:"\u205f",Mellintrf:"\u2133",Mfr:"\ud835\udd10",mfr:"\ud835\udd2a",mho:"\u2127",micro:"\xb5",midast:"*",midcir:"\u2af0",mid:"\u2223",middot:"\xb7",minusb:"\u229f",minus:"\u2212",minusd:"\u2238",minusdu:"\u2a2a",MinusPlus:"\u2213",mlcp:"\u2adb",mldr:"\u2026",mnplus:"\u2213",models:"\u22a7",Mopf:"\ud835\udd44",mopf:"\ud835\udd5e",mp:"\u2213",mscr:"\ud835\udcc2",Mscr:"\u2133",mstpos:"\u223e",Mu:"\u039c",mu:"\u03bc",multimap:"\u22b8",mumap:"\u22b8",nabla:"\u2207",Nacute:"\u0143",nacute:"\u0144",nang:"\u2220\u20d2",nap:"\u2249",napE:"\u2a70\u0338",napid:"\u224b\u0338",napos:"\u0149",napprox:"\u2249",natural:"\u266e",naturals:"\u2115",natur:"\u266e
",nbsp:"\xa0",nbump:"\u224e\u0338",nbumpe:"\u224f\u0338",ncap:"\u2a43",Ncaron:"\u0147",ncaron:"\u0148",Ncedil:"\u0145",ncedil:"\u0146",ncong:"\u2247",ncongdot:"\u2a6d\u0338",ncup:"\u2a42",Ncy:"\u041d",ncy:"\u043d",ndash:"\u2013",nearhk:"\u2924",nearr:"\u2197",neArr:"\u21d7",nearrow:"\u2197",ne:"\u2260",nedot:"\u2250\u0338",NegativeMediumSpace:"\u200b",NegativeThickSpace:"\u200b",NegativeThinSpace:"\u200b",NegativeVeryThinSpace:"\u200b",nequiv:"\u2262",nesear:"\u2928",nesim:"\u2242\u0338",NestedGreaterGreater:"\u226b",NestedLessLess:"\u226a",NewLine:"\n",nexist:"\u2204",nexists:"\u2204",Nfr:"\ud835\udd11",nfr:"\ud835\udd2b",ngE:"\u2267\u0338",nge:"\u2271",ngeq:"\u2271",ngeqq:"\u2267\u0338",ngeqslant:"\u2a7e\u0338",nges:"\u2a7e\u0338",nGg:"\u22d9\u0338",ngsim:"\u2275",nGt:"\u226b\u20d2",ngt:"\u226f",ngtr:"\u226f",nGtv:"\u226b\u0338",nharr:"\u21ae",nhArr:"\u21ce",nhpar:"\u2af2",ni:"\u220b",nis:"\u22fc",nisd:"\u22fa",niv:"\u220b",NJcy:"\u040a",njcy:"\u045a",nlarr:"\u219a",nlArr:"\u21cd",nldr:"\u2025",nlE:"\u2266\u0338",nle:"\u2270",nleftarrow:"\u219a",nLeftarrow:"\u21cd",nleftrightarrow:"\u21ae",nLeftrightarrow:"\u21ce",nleq:"\u2270",nleqq:"\u2266\u0338",nleqslant:"\u2a7d\u0338",nles:"\u2a7d\u0338",nless:"\u226e",nLl:"\u22d8\u0338",nlsim:"\u2274",nLt:"\u226a\u20d2",nlt:"\u226e",nltri:"\u22ea",nltrie:"\u22ec",nLtv:"\u226a\u0338",nmid:"\u2224",NoBreak:"\u2060",NonBreakingSpace:"\xa0",nopf:"\ud835\udd5f",Nopf:"\u2115",Not:"\u2aec",not:"\xac",NotCongruent:"\u2262",NotCupCap:"\u226d",NotDoubleVerticalBar:"\u2226",NotElement:"\u2209",NotEqual:"\u2260",NotEqualTilde:"\u2242\u0338",NotExists:"\u2204",NotGreater:"\u226f",NotGreaterEqual:"\u2271",NotGreaterFullEqual:"\u2267\u0338",NotGreaterGreater:"\u226b\u0338",NotGreaterLess:"\u2279",NotGreaterSlantEqual:"\u2a7e\u0338",NotGreaterTilde:"\u2275",NotHumpDownHump:"\u224e\u0338",NotHumpEqual:"\u224f\u0338",notin:"\u2209",notindot:"\u22f5\u0338",notinE:"\u22f9\u0338",notinva:"\u2209",notinvb:"\u22f7",notinvc:"\u22f6",NotLeftTriangle
Bar:"\u29cf\u0338",NotLeftTriangle:"\u22ea",NotLeftTriangleEqual:"\u22ec",NotLess:"\u226e",NotLessEqual:"\u2270",NotLessGreater:"\u2278",NotLessLess:"\u226a\u0338",NotLessSlantEqual:"\u2a7d\u0338",NotLessTilde:"\u2274",NotNestedGreaterGreater:"\u2aa2\u0338",NotNestedLessLess:"\u2aa1\u0338",notni:"\u220c",notniva:"\u220c",notnivb:"\u22fe",notnivc:"\u22fd",NotPrecedes:"\u2280",NotPrecedesEqual:"\u2aaf\u0338",NotPrecedesSlantEqual:"\u22e0",NotReverseElement:"\u220c",NotRightTriangleBar:"\u29d0\u0338",NotRightTriangle:"\u22eb",NotRightTriangleEqual:"\u22ed",NotSquareSubset:"\u228f\u0338",NotSquareSubsetEqual:"\u22e2",NotSquareSuperset:"\u2290\u0338",NotSquareSupersetEqual:"\u22e3",NotSubset:"\u2282\u20d2",NotSubsetEqual:"\u2288",NotSucceeds:"\u2281",NotSucceedsEqual:"\u2ab0\u0338",NotSucceedsSlantEqual:"\u22e1",NotSucceedsTilde:"\u227f\u0338",NotSuperset:"\u2283\u20d2",NotSupersetEqual:"\u2289",NotTilde:"\u2241",NotTildeEqual:"\u2244",NotTildeFullEqual:"\u2247",NotTildeTilde:"\u2249",NotVerticalBar:"\u2224",nparallel:"\u2226",npar:"\u2226",nparsl:"\u2afd\u20e5",npart:"\u2202\u0338",npolint:"\u2a14",npr:"\u2280",nprcue:"\u22e0",nprec:"\u2280",npreceq:"\u2aaf\u0338",npre:"\u2aaf\u0338",nrarrc:"\u2933\u0338",nrarr:"\u219b",nrArr:"\u21cf",nrarrw:"\u219d\u0338",nrightarrow:"\u219b",nRightarrow:"\u21cf",nrtri:"\u22eb",nrtrie:"\u22ed",nsc:"\u2281",nsccue:"\u22e1",nsce:"\u2ab0\u0338",Nscr:"\ud835\udca9",nscr:"\ud835\udcc3",nshortmid:"\u2224",nshortparallel:"\u2226",nsim:"\u2241",nsime:"\u2244",nsimeq:"\u2244",nsmid:"\u2224",nspar:"\u2226",nsqsube:"\u22e2",nsqsupe:"\u22e3",nsub:"\u2284",nsubE:"\u2ac5\u0338",nsube:"\u2288",nsubset:"\u2282\u20d2",nsubseteq:"\u2288",nsubseteqq:"\u2ac5\u0338",nsucc:"\u2281",nsucceq:"\u2ab0\u0338",nsup:"\u2285",nsupE:"\u2ac6\u0338",nsupe:"\u2289",nsupset:"\u2283\u20d2",nsupseteq:"\u2289",nsupseteqq:"\u2ac6\u0338",ntgl:"\u2279",Ntilde:"\xd1",ntilde:"\xf1",ntlg:"\u2278",ntriangleleft:"\u22ea",ntrianglelefteq:"\u22ec",ntriangleright:"\u22eb",ntriangleri
ghteq:"\u22ed",Nu:"\u039d",nu:"\u03bd",num:"#",numero:"\u2116",numsp:"\u2007",nvap:"\u224d\u20d2",nvdash:"\u22ac",nvDash:"\u22ad",nVdash:"\u22ae",nVDash:"\u22af",nvge:"\u2265\u20d2",nvgt:">\u20d2",nvHarr:"\u2904",nvinfin:"\u29de",nvlArr:"\u2902",nvle:"\u2264\u20d2",nvlt:"<\u20d2",nvltrie:"\u22b4\u20d2",nvrArr:"\u2903",nvrtrie:"\u22b5\u20d2",nvsim:"\u223c\u20d2",nwarhk:"\u2923",nwarr:"\u2196",nwArr:"\u21d6",nwarrow:"\u2196",nwnear:"\u2927",Oacute:"\xd3",oacute:"\xf3",oast:"\u229b",Ocirc:"\xd4",ocirc:"\xf4",ocir:"\u229a",Ocy:"\u041e",ocy:"\u043e",odash:"\u229d",Odblac:"\u0150",odblac:"\u0151",odiv:"\u2a38",odot:"\u2299",odsold:"\u29bc",OElig:"\u0152",oelig:"\u0153",ofcir:"\u29bf",Ofr:"\ud835\udd12",ofr:"\ud835\udd2c",ogon:"\u02db",Ograve:"\xd2",ograve:"\xf2",ogt:"\u29c1",ohbar:"\u29b5",ohm:"\u03a9",oint:"\u222e",olarr:"\u21ba",olcir:"\u29be",olcross:"\u29bb",oline:"\u203e",olt:"\u29c0",Omacr:"\u014c",omacr:"\u014d",Omega:"\u03a9",omega:"\u03c9",Omicron:"\u039f",omicron:"\u03bf",omid:"\u29b6",ominus:"\u2296",Oopf:"\ud835\udd46",oopf:"\ud835\udd60",opar:"\u29b7",OpenCurlyDoubleQuote:"\u201c",OpenCurlyQuote:"\u2018",operp:"\u29b9",oplus:"\u2295",orarr:"\u21bb",Or:"\u2a54",or:"\u2228",ord:"\u2a5d",order:"\u2134",orderof:"\u2134",ordf:"\xaa",ordm:"\xba",origof:"\u22b6",oror:"\u2a56",orslope:"\u2a57",orv:"\u2a5b",oS:"\u24c8",Oscr:"\ud835\udcaa",oscr:"\u2134",Oslash:"\xd8",oslash:"\xf8",osol:"\u2298",Otilde:"\xd5",otilde:"\xf5",otimesas:"\u2a36",Otimes:"\u2a37",otimes:"\u2297",Ouml:"\xd6",ouml:"\xf6",ovbar:"\u233d",OverBar:"\u203e",OverBrace:"\u23de",OverBracket:"\u23b4",OverParenthesis:"\u23dc",para:"\xb6",parallel:"\u2225",par:"\u2225",parsim:"\u2af3",parsl:"\u2afd",part:"\u2202",PartialD:"\u2202",Pcy:"\u041f",pcy:"\u043f",percnt:"%",period:".",permil:"\u2030",perp:"\u22a5",pertenk:"\u2031",Pfr:"\ud835\udd13",pfr:"\ud835\udd2d",Phi:"\u03a6",phi:"\u03c6",phiv:"\u03d5",phmmat:"\u2133",phone:"\u260e",Pi:"\u03a0",pi:"\u03c0",pitchfork:"\u22d4",piv:"\u03d6",planck:"\u210f",plan
ckh:"\u210e",plankv:"\u210f",plusacir:"\u2a23",plusb:"\u229e",pluscir:"\u2a22",plus:"+",plusdo:"\u2214",plusdu:"\u2a25",pluse:"\u2a72",PlusMinus:"\xb1",plusmn:"\xb1",plussim:"\u2a26",plustwo:"\u2a27",pm:"\xb1",Poincareplane:"\u210c",pointint:"\u2a15",popf:"\ud835\udd61",Popf:"\u2119",pound:"\xa3",prap:"\u2ab7",Pr:"\u2abb",pr:"\u227a",prcue:"\u227c",precapprox:"\u2ab7",prec:"\u227a",preccurlyeq:"\u227c",Precedes:"\u227a",PrecedesEqual:"\u2aaf",PrecedesSlantEqual:"\u227c",PrecedesTilde:"\u227e",preceq:"\u2aaf",precnapprox:"\u2ab9",precneqq:"\u2ab5",precnsim:"\u22e8",pre:"\u2aaf",prE:"\u2ab3",precsim:"\u227e",prime:"\u2032",Prime:"\u2033",primes:"\u2119",prnap:"\u2ab9",prnE:"\u2ab5",prnsim:"\u22e8",prod:"\u220f",Product:"\u220f",profalar:"\u232e",profline:"\u2312",profsurf:"\u2313",prop:"\u221d",Proportional:"\u221d",Proportion:"\u2237",propto:"\u221d",prsim:"\u227e",prurel:"\u22b0",Pscr:"\ud835\udcab",pscr:"\ud835\udcc5",Psi:"\u03a8",psi:"\u03c8",puncsp:"\u2008",Qfr:"\ud835\udd14",qfr:"\ud835\udd2e",qint:"\u2a0c",qopf:"\ud835\udd62",Qopf:"\u211a",qprime:"\u2057",Qscr:"\ud835\udcac",qscr:"\ud835\udcc6",quaternions:"\u210d",quatint:"\u2a16",quest:"?",questeq:"\u225f",quot:'"',QUOT:'"',rAarr:"\u21db",race:"\u223d\u0331",Racute:"\u0154",racute:"\u0155",radic:"\u221a",raemptyv:"\u29b3",rang:"\u27e9",Rang:"\u27eb",rangd:"\u2992",range:"\u29a5",rangle:"\u27e9",raquo:"\xbb",rarrap:"\u2975",rarrb:"\u21e5",rarrbfs:"\u2920",rarrc:"\u2933",rarr:"\u2192",Rarr:"\u21a0",rArr:"\u21d2",rarrfs:"\u291e",rarrhk:"\u21aa",rarrlp:"\u21ac",rarrpl:"\u2945",rarrsim:"\u2974",Rarrtl:"\u2916",rarrtl:"\u21a3",rarrw:"\u219d",ratail:"\u291a",rAtail:"\u291c",ratio:"\u2236",rationals:"\u211a",rbarr:"\u290d",rBarr:"\u290f",RBarr:"\u2910",rbbrk:"\u2773",rbrace:"}",rbrack:"]",rbrke:"\u298c",rbrksld:"\u298e",rbrkslu:"\u2990",Rcaron:"\u0158",rcaron:"\u0159",Rcedil:"\u0156",rcedil:"\u0157",rceil:"\u2309",rcub:"}",Rcy:"\u0420",rcy:"\u0440",rdca:"\u2937",rdldhar:"\u2969",rdquo:"\u201d",rdquor:"\u201d",rdsh:"\
u21b3",real:"\u211c",realine:"\u211b",realpart:"\u211c",reals:"\u211d",Re:"\u211c",rect:"\u25ad",reg:"\xae",REG:"\xae",ReverseElement:"\u220b",ReverseEquilibrium:"\u21cb",ReverseUpEquilibrium:"\u296f",rfisht:"\u297d",rfloor:"\u230b",rfr:"\ud835\udd2f",Rfr:"\u211c",rHar:"\u2964",rhard:"\u21c1",rharu:"\u21c0",rharul:"\u296c",Rho:"\u03a1",rho:"\u03c1",rhov:"\u03f1",RightAngleBracket:"\u27e9",RightArrowBar:"\u21e5",rightarrow:"\u2192",RightArrow:"\u2192",Rightarrow:"\u21d2",RightArrowLeftArrow:"\u21c4",rightarrowtail:"\u21a3",RightCeiling:"\u2309",RightDoubleBracket:"\u27e7",RightDownTeeVector:"\u295d",RightDownVectorBar:"\u2955",RightDownVector:"\u21c2",RightFloor:"\u230b",rightharpoondown:"\u21c1",rightharpoonup:"\u21c0",rightleftarrows:"\u21c4",rightleftharpoons:"\u21cc",rightrightarrows:"\u21c9",rightsquigarrow:"\u219d",RightTeeArrow:"\u21a6",RightTee:"\u22a2",RightTeeVector:"\u295b",rightthreetimes:"\u22cc",RightTriangleBar:"\u29d0",RightTriangle:"\u22b3",RightTriangleEqual:"\u22b5",RightUpDownVector:"\u294f",RightUpTeeVector:"\u295c",RightUpVectorBar:"\u2954",RightUpVector:"\u21be",RightVectorBar:"\u2953",RightVector:"\u21c0",ring:"\u02da",risingdotseq:"\u2253",rlarr:"\u21c4",rlhar:"\u21cc",rlm:"\u200f",rmoustache:"\u23b1",rmoust:"\u23b1",rnmid:"\u2aee",roang:"\u27ed",roarr:"\u21fe",robrk:"\u27e7",ropar:"\u2986",ropf:"\ud835\udd63",Ropf:"\u211d",roplus:"\u2a2e",rotimes:"\u2a35",RoundImplies:"\u2970",rpar:")",rpargt:"\u2994",rppolint:"\u2a12",rrarr:"\u21c9",Rrightarrow:"\u21db",rsaquo:"\u203a",rscr:"\ud835\udcc7",Rscr:"\u211b",rsh:"\u21b1",Rsh:"\u21b1",rsqb:"]",rsquo:"\u2019",rsquor:"\u2019",rthree:"\u22cc",rtimes:"\u22ca",rtri:"\u25b9",rtrie:"\u22b5",rtrif:"\u25b8",rtriltri:"\u29ce",RuleDelayed:"\u29f4",ruluhar:"\u2968",rx:"\u211e",Sacute:"\u015a",sacute:"\u015b",sbquo:"\u201a",scap:"\u2ab8",Scaron:"\u0160",scaron:"\u0161",Sc:"\u2abc",sc:"\u227b",sccue:"\u227d",sce:"\u2ab0",scE:"\u2ab4",Scedil:"\u015e",scedil:"\u015f",Scirc:"\u015c",scirc:"\u015d",scnap:"\u2aba",s
cnE:"\u2ab6",scnsim:"\u22e9",scpolint:"\u2a13",scsim:"\u227f",Scy:"\u0421",scy:"\u0441",sdotb:"\u22a1",sdot:"\u22c5",sdote:"\u2a66",searhk:"\u2925",searr:"\u2198",seArr:"\u21d8",searrow:"\u2198",sect:"\xa7",semi:";",seswar:"\u2929",setminus:"\u2216",setmn:"\u2216",sext:"\u2736",Sfr:"\ud835\udd16",sfr:"\ud835\udd30",sfrown:"\u2322",sharp:"\u266f",SHCHcy:"\u0429",shchcy:"\u0449",SHcy:"\u0428",shcy:"\u0448",ShortDownArrow:"\u2193",ShortLeftArrow:"\u2190",shortmid:"\u2223",shortparallel:"\u2225",ShortRightArrow:"\u2192",ShortUpArrow:"\u2191",shy:"\xad",Sigma:"\u03a3",sigma:"\u03c3",sigmaf:"\u03c2",sigmav:"\u03c2",sim:"\u223c",simdot:"\u2a6a",sime:"\u2243",simeq:"\u2243",simg:"\u2a9e",simgE:"\u2aa0",siml:"\u2a9d",simlE:"\u2a9f",simne:"\u2246",simplus:"\u2a24",simrarr:"\u2972",slarr:"\u2190",SmallCircle:"\u2218",smallsetminus:"\u2216",smashp:"\u2a33",smeparsl:"\u29e4",smid:"\u2223",smile:"\u2323",smt:"\u2aaa",smte:"\u2aac",smtes:"\u2aac\ufe00",SOFTcy:"\u042c",softcy:"\u044c",solbar:"\u233f",solb:"\u29c4",sol:"/",Sopf:"\ud835\udd4a",sopf:"\ud835\udd64",spades:"\u2660",spadesuit:"\u2660",spar:"\u2225",sqcap:"\u2293",sqcaps:"\u2293\ufe00",sqcup:"\u2294",sqcups:"\u2294\ufe00",Sqrt:"\u221a",sqsub:"\u228f",sqsube:"\u2291",sqsubset:"\u228f",sqsubseteq:"\u2291",sqsup:"\u2290",sqsupe:"\u2292",sqsupset:"\u2290",sqsupseteq:"\u2292",square:"\u25a1",Square:"\u25a1",SquareIntersection:"\u2293",SquareSubset:"\u228f",SquareSubsetEqual:"\u2291",SquareSuperset:"\u2290",SquareSupersetEqual:"\u2292",SquareUnion:"\u2294",squarf:"\u25aa",squ:"\u25a1",squf:"\u25aa",srarr:"\u2192",Sscr:"\ud835\udcae",sscr:"\ud835\udcc8",ssetmn:"\u2216",ssmile:"\u2323",sstarf:"\u22c6",Star:"\u22c6",star:"\u2606",starf:"\u2605",straightepsilon:"\u03f5",straightphi:"\u03d5",strns:"\xaf",sub:"\u2282",Sub:"\u22d0",subdot:"\u2abd",subE:"\u2ac5",sube:"\u2286",subedot:"\u2ac3",submult:"\u2ac1",subnE:"\u2acb",subne:"\u228a",subplus:"\u2abf",subrarr:"\u2979",subset:"\u2282",Subset:"\u22d0",subseteq:"\u2286",subseteqq:"\u2
ac5",SubsetEqual:"\u2286",subsetneq:"\u228a",subsetneqq:"\u2acb",subsim:"\u2ac7",subsub:"\u2ad5",subsup:"\u2ad3",succapprox:"\u2ab8",succ:"\u227b",succcurlyeq:"\u227d",Succeeds:"\u227b",SucceedsEqual:"\u2ab0",SucceedsSlantEqual:"\u227d",SucceedsTilde:"\u227f",succeq:"\u2ab0",succnapprox:"\u2aba",succneqq:"\u2ab6",succnsim:"\u22e9",succsim:"\u227f",SuchThat:"\u220b",sum:"\u2211",Sum:"\u2211",sung:"\u266a",sup1:"\xb9",sup2:"\xb2",sup3:"\xb3",sup:"\u2283",Sup:"\u22d1",supdot:"\u2abe",supdsub:"\u2ad8",supE:"\u2ac6",supe:"\u2287",supedot:"\u2ac4",Superset:"\u2283",SupersetEqual:"\u2287",suphsol:"\u27c9",suphsub:"\u2ad7",suplarr:"\u297b",supmult:"\u2ac2",supnE:"\u2acc",supne:"\u228b",supplus:"\u2ac0",supset:"\u2283",Supset:"\u22d1",supseteq:"\u2287",supseteqq:"\u2ac6",supsetneq:"\u228b",supsetneqq:"\u2acc",supsim:"\u2ac8",supsub:"\u2ad4",supsup:"\u2ad6",swarhk:"\u2926",swarr:"\u2199",swArr:"\u21d9",swarrow:"\u2199",swnwar:"\u292a",szlig:"\xdf",Tab:"\t",target:"\u2316",Tau:"\u03a4",tau:"\u03c4",tbrk:"\u23b4",Tcaron:"\u0164",tcaron:"\u0165",Tcedil:"\u0162",tcedil:"\u0163",Tcy:"\u0422",tcy:"\u0442",tdot:"\u20db",telrec:"\u2315",Tfr:"\ud835\udd17",tfr:"\ud835\udd31",there4:"\u2234",therefore:"\u2234",Therefore:"\u2234",Theta:"\u0398",theta:"\u03b8",thetasym:"\u03d1",thetav:"\u03d1",thickapprox:"\u2248",thicksim:"\u223c",ThickSpace:"\u205f\u200a",ThinSpace:"\u2009",thinsp:"\u2009",thkap:"\u2248",thksim:"\u223c",THORN:"\xde",thorn:"\xfe",tilde:"\u02dc",Tilde:"\u223c",TildeEqual:"\u2243",TildeFullEqual:"\u2245",TildeTilde:"\u2248",timesbar:"\u2a31",timesb:"\u22a0",times:"\xd7",timesd:"\u2a30",tint:"\u222d",toea:"\u2928",topbot:"\u2336",topcir:"\u2af1",top:"\u22a4",Topf:"\ud835\udd4b",topf:"\ud835\udd65",topfork:"\u2ada",tosa:"\u2929",tprime:"\u2034",trade:"\u2122",TRADE:"\u2122",triangle:"\u25b5",triangledown:"\u25bf",triangleleft:"\u25c3",trianglelefteq:"\u22b4",triangleq:"\u225c",triangleright:"\u25b9",trianglerighteq:"\u22b5",tridot:"\u25ec",trie:"\u225c",triminus:"\u2a3a",Tr
ipleDot:"\u20db",triplus:"\u2a39",trisb:"\u29cd",tritime:"\u2a3b",trpezium:"\u23e2",Tscr:"\ud835\udcaf",tscr:"\ud835\udcc9",TScy:"\u0426",tscy:"\u0446",TSHcy:"\u040b",tshcy:"\u045b",Tstrok:"\u0166",tstrok:"\u0167",twixt:"\u226c",twoheadleftarrow:"\u219e",twoheadrightarrow:"\u21a0",Uacute:"\xda",uacute:"\xfa",uarr:"\u2191",Uarr:"\u219f",uArr:"\u21d1",Uarrocir:"\u2949",Ubrcy:"\u040e",ubrcy:"\u045e",Ubreve:"\u016c",ubreve:"\u016d",Ucirc:"\xdb",ucirc:"\xfb",Ucy:"\u0423",ucy:"\u0443",udarr:"\u21c5",Udblac:"\u0170",udblac:"\u0171",udhar:"\u296e",ufisht:"\u297e",Ufr:"\ud835\udd18",ufr:"\ud835\udd32",Ugrave:"\xd9",ugrave:"\xf9",uHar:"\u2963",uharl:"\u21bf",uharr:"\u21be",uhblk:"\u2580",ulcorn:"\u231c",ulcorner:"\u231c",ulcrop:"\u230f",ultri:"\u25f8",Umacr:"\u016a",umacr:"\u016b",uml:"\xa8",UnderBar:"_",UnderBrace:"\u23df",UnderBracket:"\u23b5",UnderParenthesis:"\u23dd",Union:"\u22c3",UnionPlus:"\u228e",Uogon:"\u0172",uogon:"\u0173",Uopf:"\ud835\udd4c",uopf:"\ud835\udd66",UpArrowBar:"\u2912",uparrow:"\u2191",UpArrow:"\u2191",Uparrow:"\u21d1",UpArrowDownArrow:"\u21c5",updownarrow:"\u2195",UpDownArrow:"\u2195",Updownarrow:"\u21d5",UpEquilibrium:"\u296e",upharpoonleft:"\u21bf",upharpoonright:"\u21be",uplus:"\u228e",UpperLeftArrow:"\u2196",UpperRightArrow:"\u2197",upsi:"\u03c5",Upsi:"\u03d2",upsih:"\u03d2",Upsilon:"\u03a5",upsilon:"\u03c5",UpTeeArrow:"\u21a5",UpTee:"\u22a5",upuparrows:"\u21c8",urcorn:"\u231d",urcorner:"\u231d",urcrop:"\u230e",Uring:"\u016e",uring:"\u016f",urtri:"\u25f9",Uscr:"\ud835\udcb0",uscr:"\ud835\udcca",utdot:"\u22f0",Utilde:"\u0168",utilde:"\u0169",utri:"\u25b5",utrif:"\u25b4",uuarr:"\u21c8",Uuml:"\xdc",uuml:"\xfc",uwangle:"\u29a7",vangrt:"\u299c",varepsilon:"\u03f5",varkappa:"\u03f0",varnothing:"\u2205",varphi:"\u03d5",varpi:"\u03d6",varpropto:"\u221d",varr:"\u2195",vArr:"\u21d5",varrho:"\u03f1",varsigma:"\u03c2",varsubsetneq:"\u228a\ufe00",varsubsetneqq:"\u2acb\ufe00",varsupsetneq:"\u228b\ufe00",varsupsetneqq:"\u2acc\ufe00",vartheta:"\u03d1",vartriangle
left:"\u22b2",vartriangleright:"\u22b3",vBar:"\u2ae8",Vbar:"\u2aeb",vBarv:"\u2ae9",Vcy:"\u0412",vcy:"\u0432",vdash:"\u22a2",vDash:"\u22a8",Vdash:"\u22a9",VDash:"\u22ab",Vdashl:"\u2ae6",veebar:"\u22bb",vee:"\u2228",Vee:"\u22c1",veeeq:"\u225a",vellip:"\u22ee",verbar:"|",Verbar:"\u2016",vert:"|",Vert:"\u2016",VerticalBar:"\u2223",VerticalLine:"|",VerticalSeparator:"\u2758",VerticalTilde:"\u2240",VeryThinSpace:"\u200a",Vfr:"\ud835\udd19",vfr:"\ud835\udd33",vltri:"\u22b2",vnsub:"\u2282\u20d2",vnsup:"\u2283\u20d2",Vopf:"\ud835\udd4d",vopf:"\ud835\udd67",vprop:"\u221d",vrtri:"\u22b3",Vscr:"\ud835\udcb1",vscr:"\ud835\udccb",vsubnE:"\u2acb\ufe00",vsubne:"\u228a\ufe00",vsupnE:"\u2acc\ufe00",vsupne:"\u228b\ufe00",Vvdash:"\u22aa",vzigzag:"\u299a",Wcirc:"\u0174",wcirc:"\u0175",wedbar:"\u2a5f",wedge:"\u2227",Wedge:"\u22c0",wedgeq:"\u2259",weierp:"\u2118",Wfr:"\ud835\udd1a",wfr:"\ud835\udd34",Wopf:"\ud835\udd4e",wopf:"\ud835\udd68",wp:"\u2118",wr:"\u2240",wreath:"\u2240",Wscr:"\ud835\udcb2",wscr:"\ud835\udccc",xcap:"\u22c2",xcirc:"\u25ef",xcup:"\u22c3",xdtri:"\u25bd",Xfr:"\ud835\udd1b",xfr:"\ud835\udd35",xharr:"\u27f7",xhArr:"\u27fa",Xi:"\u039e",xi:"\u03be",xlarr:"\u27f5",xlArr:"\u27f8",xmap:"\u27fc",xnis:"\u22fb",xodot:"\u2a00",Xopf:"\ud835\udd4f",xopf:"\ud835\udd69",xoplus:"\u2a01",xotime:"\u2a02",xrarr:"\u27f6",xrArr:"\u27f9",Xscr:"\ud835\udcb3",xscr:"\ud835\udccd",xsqcup:"\u2a06",xuplus:"\u2a04",xutri:"\u25b3",xvee:"\u22c1",xwedge:"\u22c0",Yacute:"\xdd",yacute:"\xfd",YAcy:"\u042f",yacy:"\u044f",Ycirc:"\u0176",ycirc:"\u0177",Ycy:"\u042b",ycy:"\u044b",yen:"\xa5",Yfr:"\ud835\udd1c",yfr:"\ud835\udd36",YIcy:"\u0407",yicy:"\u0457",Yopf:"\ud835\udd50",yopf:"\ud835\udd6a",Yscr:"\ud835\udcb4",yscr:"\ud835\udcce",YUcy:"\u042e",yucy:"\u044e",yuml:"\xff",Yuml:"\u0178",Zacute:"\u0179",zacute:"\u017a",Zcaron:"\u017d",zcaron:"\u017e",Zcy:"\u0417",zcy:"\u0437",Zdot:"\u017b",zdot:"\u017c",zeetrf:"\u2128",ZeroWidthSpace:"\u200b",Zeta:"\u0396",zeta:"\u03b6",zfr:"\ud835\udd37",Zfr:"\u2128",ZHcy:"
\u0416",zhcy:"\u0436",zigrarr:"\u21dd",zopf:"\ud835\udd6b",Zopf:"\u2124",Zscr:"\ud835\udcb5",zscr:"\ud835\udccf",zwj:"\u200d",zwnj:"\u200c"},t=/[!-#%-\*,-\/:;\?@\[-\]_\{\}\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166D\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4E\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]|\uD800[\uDD00-\uDD02\uDF9F\uDFD0]|\uD801\uDD6F|\uD802[\uDC57\uDD1F\uDD3F\uDE50-\uDE58\uDE7F\uDEF0-\uDEF6\uDF39-\uDF3F\uDF99-\uDF9C]|\uD803[\uDF55-\uDF59]|\uD804[\uDC47-\uDC4D\uDCBB\uDCBC\uDCBE-\uDCC1\uDD40-\uDD43\uDD74\uDD75\uDDC5-\uDDC8\uDDCD\uDDDB\uDDDD-\uDDDF\uDE38-\uDE3D\uDEA9]|\uD805[\uDC4B-\uDC4F\uDC5B\uDC5D\uDCC6\uDDC1-\uDDD7\uDE41-\uDE43\uDE60-\uDE6C\uDF3C-\uDF3E]|\uD806[\uDC3B\uDE3F-\uDE46\uDE9A-\uDE9C\uDE9E-\uDEA2]|\uD807[\uDC41-\uDC45\uDC70\uDC71\uDEF7\uDEF8]|\uD809[\uDC70-\uDC74]|\uD81A[\uDE6E\uDE6F\uDEF5\uDF37-\uDF3B\uDF44]|\uD81B[\uDE97-\uDE9A]|\uD82F\uDC9F|\uD836[\
uDE87-\uDE8B]|\uD83A[\uDD5E\uDD5F]/,n={};function s(e,r,t){var o,i,a,c,l,u="";for("string"!=typeof r&&(t=r,r=s.defaultChars),void 0===t&&(t=!0),l=function(e){var r,t,s=n[e];if(s)return s;for(s=n[e]=[],r=0;r<128;r++)t=String.fromCharCode(r),/^[0-9a-z]$/i.test(t)?s.push(t):s.push("%"+("0"+r.toString(16).toUpperCase()).slice(-2));for(r=0;r=55296&&a<=57343){if(a>=55296&&a<=56319&&o+1=56320&&c<=57343){u+=encodeURIComponent(e[o]+e[o+1]),o++;continue}u+="%EF%BF%BD"}else u+=encodeURIComponent(e[o]);return u}s.defaultChars=";/?:@&=+$,-_.!~*'()#",s.componentChars="-_.!~*'()";var o=s,i={};function a(e,r){var t;return"string"!=typeof r&&(r=a.defaultChars),t=function(e){var r,t,n=i[e];if(n)return n;for(n=i[e]=[],r=0;r<128;r++)t=String.fromCharCode(r),n.push(t);for(r=0;r=55296&&c<=57343?"\ufffd\ufffd\ufffd":String.fromCharCode(c),r+=6):240==(248&s)&&r+91114111?l+="\ufffd\ufffd\ufffd\ufffd":(c-=65536,l+=String.fromCharCode(55296+(c>>10),56320+(1023&c))),r+=9):l+="\ufffd";return l}))}a.defaultChars=";/?:@&=+$,#",a.componentChars="";var c=a;function l(){this.protocol=null,this.slashes=null,this.auth=null,this.port=null,this.hostname=null,this.hash=null,this.search=null,this.pathname=null}var u=/^([a-z0-9.+-]+:)/i,p=/:[0-9]*$/,h=/^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/,f=["{","}","|","\\","^","`"].concat(["<",">",'"',"`"," ","\r","\n","\t"]),d=["'"].concat(f),m=["%","/","?",";","#"].concat(d),g=["/","?","#"],_=/^[+a-z0-9A-Z_-]{0,63}$/,k=/^([+a-z0-9A-Z_-]{0,63})(.*)$/,b={javascript:!0,"javascript:":!0},v={http:!0,https:!0,ftp:!0,gopher:!0,file:!0,"http:":!0,"https:":!0,"ftp:":!0,"gopher:":!0,"file:":!0};l.prototype.parse=function(e,r){var t,n,s,o,i,a=e;if(a=a.trim(),!r&&1===e.split("#").length){var c=h.exec(a);if(c)return this.pathname=c[1],c[2]&&(this.search=c[2]),this}var 
l=u.exec(a);if(l&&(s=(l=l[0]).toLowerCase(),this.protocol=l,a=a.substr(l.length)),(r||l||a.match(/^\/\/[^@\/]+@[^@\/]+/))&&(!(i="//"===a.substr(0,2))||l&&b[l]||(a=a.substr(2),this.slashes=!0)),!b[l]&&(i||l&&!v[l])){var p,f,d=-1;for(t=0;t127?D+="x":D+=x[w];if(!D.match(_)){var q=A.slice(0,t),S=A.slice(t+1),F=x.match(k);F&&(q.push(F[1]),S.unshift(F[2])),S.length&&(a=S.join(".")+a),this.hostname=q.join(".");break}}}}this.hostname.length>255&&(this.hostname=""),y&&(this.hostname=this.hostname.substr(1,this.hostname.length-2))}var L=a.indexOf("#");-1!==L&&(this.hash=a.substr(L),a=a.slice(0,L));var z=a.indexOf("?");return-1!==z&&(this.search=a.substr(z),a=a.slice(0,z)),a&&(this.pathname=a),v[s]&&this.hostname&&!this.pathname&&(this.pathname=""),this},l.prototype.parseHost=function(e){var r=p.exec(e);r&&(":"!==(r=r[0])&&(this.port=r.substr(1)),e=e.substr(0,e.length-r.length)),e&&(this.hostname=e)};var C={encode:o,decode:c,format:function(e){var r="";return r+=e.protocol||"",r+=e.slashes?"//":"",r+=e.auth?e.auth+"@":"",e.hostname&&-1!==e.hostname.indexOf(":")?r+="["+e.hostname+"]":r+=e.hostname||"",r+=e.port?":"+e.port:"",r+=e.pathname||"",r+=e.search||"",r+=e.hash||""},parse:function(e,r){if(e&&e instanceof l)return e;var t=new l;return t.parse(e,r),t}},y=/[\0-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/,A=/[\0-\x1F\x7F-\x9F]/,x=/[ \xA0\u1680\u2000-\u200A\u2028\u2029\u202F\u205F\u3000]/,D={Any:y,Cc:A,Cf:/[\xAD\u0600-\u0605\u061C\u06DD\u070F\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/,P:t,Z:x},w=function(e,r,t){return t={path:r,exports:{},require:function(e,r){return function(){throw new Error("Dynamic requires are not currently supported by @rollup/plugin-commonjs")}(null==r&&t.path)}},e(t,t.exports),t.exports}((function(e,n){var 
s=Object.prototype.hasOwnProperty;function o(e,r){return s.call(e,r)}function i(e){return!(e>=55296&&e<=57343)&&(!(e>=64976&&e<=65007)&&(65535!=(65535&e)&&65534!=(65535&e)&&(!(e>=0&&e<=8)&&(11!==e&&(!(e>=14&&e<=31)&&(!(e>=127&&e<=159)&&!(e>1114111)))))))}function a(e){if(e>65535){var r=55296+((e-=65536)>>10),t=56320+(1023&e);return String.fromCharCode(r,t)}return String.fromCharCode(e)}var c=/\\([!"#$%&'()*+,\-.\/:;<=>?@[\\\]^_`{|}~])/g,l=new RegExp(c.source+"|"+/&([a-z#][a-z0-9]{1,31});/gi.source,"gi"),u=/^#((?:x[a-f0-9]{1,8}|[0-9]{1,8}))/i;var p=/[&<>"]/,h=/[&<>"]/g,f={"&":"&","<":"<",">":">",'"':"""};function d(e){return f[e]}var m=/[.?*+^$[\]\\(){}|-]/g;n.lib={},n.lib.mdurl=C,n.lib.ucmicro=D,n.assign=function(e){var r=Array.prototype.slice.call(arguments,1);return r.forEach((function(r){if(r){if("object"!=typeof r)throw new TypeError(r+"must be object");Object.keys(r).forEach((function(t){e[t]=r[t]}))}})),e},n.isString=function(e){return"[object String]"===function(e){return Object.prototype.toString.call(e)}(e)},n.has=o,n.unescapeMd=function(e){return e.indexOf("\\")<0?e:e.replace(c,"$1")},n.unescapeAll=function(e){return e.indexOf("\\")<0&&e.indexOf("&")<0?e:e.replace(l,(function(e,t,n){return t||function(e,t){var n=0;return o(r,t)?r[t]:35===t.charCodeAt(0)&&u.test(t)&&i(n="x"===t[1].toLowerCase()?parseInt(t.slice(2),16):parseInt(t.slice(1),10))?a(n):e}(e,n)}))},n.isValidEntityCode=i,n.fromCodePoint=a,n.escapeHtml=function(e){return p.test(e)?e.replace(h,d):e},n.arrayReplaceAt=function(e,r,t){return[].concat(e.slice(0,r),t,e.slice(r+1))},n.isSpace=function(e){switch(e){case 9:case 32:return!0}return!1},n.isWhiteSpace=function(e){if(e>=8192&&e<=8202)return!0;switch(e){case 9:case 10:case 11:case 12:case 13:case 32:case 160:case 5760:case 8239:case 8287:case 12288:return!0}return!1},n.isMdAsciiPunct=function(e){switch(e){case 33:case 34:case 35:case 36:case 37:case 38:case 39:case 40:case 41:case 42:case 43:case 44:case 45:case 46:case 47:case 58:case 59:case 
60:case 61:case 62:case 63:case 64:case 91:case 92:case 93:case 94:case 95:case 96:case 123:case 124:case 125:case 126:return!0;default:return!1}},n.isPunctChar=function(e){return t.test(e)},n.escapeRE=function(e){return e.replace(m,"\\$&")},n.normalizeReference=function(e){return e=e.trim().replace(/\s+/g," "),"\u1e7e"==="\u1e9e".toLowerCase()&&(e=e.replace(/\u1e9e/g,"\xdf")),e.toLowerCase().toUpperCase()}})),E=w.unescapeAll,q=w.unescapeAll,S=function(e,r,t){var n,s,o=r,i={ok:!1,pos:0,lines:0,str:""};if(60===e.charCodeAt(r)){for(r++;r32)return i;if(41===n){if(0===s)break;s--}r++}return o===r||0!==s||(i.str=E(e.slice(o,r)),i.lines=0,i.pos=r,i.ok=!0),i},F=function(e,r,t){var n,s,o=0,i=r,a={ok:!1,pos:0,lines:0,str:""};if(r>=t)return a;if(34!==(s=e.charCodeAt(r))&&39!==s&&40!==s)return a;for(r++,40===s&&(s=41);r"+I(e[r].content)+""},M.code_block=function(e,r,t,n,s){var o=e[r];return""+I(e[r].content)+"\n"},M.fence=function(e,r,t,n,s){var o,i,a,c,l,u=e[r],p=u.info?T(u.info).trim():"",h="",f="";return p&&(h=(a=p.split(/(\s+)/g))[0],f=a.slice(2).join("")),0===(o=t.highlight&&t.highlight(u.content,h,f)||I(u.content)).indexOf(""+o+"\n"):"
    "+o+"
    \n"},M.image=function(e,r,t,n,s){var o=e[r];return o.attrs[o.attrIndex("alt")][1]=s.renderInlineAsText(o.children,t,n),s.renderToken(e,r,t)},M.hardbreak=function(e,r,t){return t.xhtmlOut?"
    \n":"
    \n"},M.softbreak=function(e,r,t){return t.breaks?t.xhtmlOut?"
    \n":"
    \n":"\n"},M.text=function(e,r){return I(e[r].content)},M.html_block=function(e,r){return e[r].content},M.html_inline=function(e,r){return e[r].content},R.prototype.renderAttrs=function(e){var r,t,n;if(!e.attrs)return"";for(n="",r=0,t=e.attrs.length;r\n":">")},R.prototype.renderInline=function(e,r,t){for(var n,s="",o=this.rules,i=0,a=e.length;i/i.test(e)}var Z=/\+-|\.\.|\?\?\?\?|!!!!|,,|--/,$=/\((c|tm|r)\)/i,G=/\((c|tm|r)\)/gi,H={c:"\xa9",r:"\xae",tm:"\u2122"};function J(e,r){return H[r.toLowerCase()]}function W(e){var r,t,n=0;for(r=e.length-1;r>=0;r--)"text"!==(t=e[r]).type||n||(t.content=t.content.replace(G,J)),"link_open"===t.type&&"auto"===t.info&&n--,"link_close"===t.type&&"auto"===t.info&&n++}function Y(e){var r,t,n=0;for(r=e.length-1;r>=0;r--)"text"!==(t=e[r]).type||n||Z.test(t.content)&&(t.content=t.content.replace(/\+-/g,"\xb1").replace(/\.{2,}/g,"\u2026").replace(/([?!])\u2026/g,"$1..").replace(/([?!]){4,}/g,"$1$1$1").replace(/,{2,}/g,",").replace(/(^|[^-])---(?=[^-]|$)/gm,"$1\u2014").replace(/(^|\s)--(?=\s|$)/gm,"$1\u2013").replace(/(^|[^-\s])--(?=[^-\s]|$)/gm,"$1\u2013")),"link_open"===t.type&&"auto"===t.info&&n--,"link_close"===t.type&&"auto"===t.info&&n++}var K=w.isWhiteSpace,Q=w.isPunctChar,X=w.isMdAsciiPunct,ee=/['"]/,re=/['"]/g;function te(e,r,t){return e.slice(0,r)+t+e.slice(r+1)}function ne(e,r){var t,n,s,o,i,a,c,l,u,p,h,f,d,m,g,_,k,b,v,C,y;for(v=[],t=0;t=0&&!(v[k].level<=c);k--);if(v.length=k+1,"text"===n.type){i=0,a=(s=n.content).length;e:for(;i=0)u=s.charCodeAt(o.index-1);else for(k=t-1;k>=0&&("softbreak"!==e[k].type&&"hardbreak"!==e[k].type);k--)if(e[k].content){u=e[k].content.charCodeAt(e[k].content.length-1);break}if(p=32,i=48&&u<=57&&(_=g=!1),g&&_&&(g=h,_=f),g||_){if(_)for(k=v.length-1;k>=0&&(l=v[k],!(v[k].level=0&&(t=this.attrs[r][1]),t},se.prototype.attrJoin=function(e,r){var t=this.attrIndex(e);t<0?this.attrPush([e,r]):this.attrs[t][1]=this.attrs[t][1]+" "+r};var oe=se;function 
ie(e,r,t){this.src=e,this.env=t,this.tokens=[],this.inlineMode=!1,this.md=r}ie.prototype.Token=oe;var ae=ie,ce=[["normalize",function(e){var r;r=(r=e.src.replace(P,"\n")).replace(j,"\ufffd"),e.src=r}],["block",function(e){var r;e.inlineMode?((r=new e.Token("inline","",0)).content=e.src,r.map=[0,1],r.children=[],e.tokens.push(r)):e.md.block.parse(e.src,e.md,e.env,e.tokens)}],["inline",function(e){var r,t,n,s=e.tokens;for(t=0,n=s.length;t=0;r--)if("link_close"!==(i=s[r]).type){if("html_inline"===i.type&&(k=i.content,/^\s]/i.test(k)&&f>0&&f--,V(i.content)&&f++),!(f>0)&&"text"===i.type&&e.md.linkify.test(i.content)){for(l=i.content,_=e.md.linkify.match(l),a=[],h=i.level,p=0,_.length>0&&0===_[0].index&&r>0&&"text_special"===s[r-1].type&&(_=_.slice(1)),c=0;c<_.length;c++)d=_[c].url,m=e.md.normalizeLink(d),e.md.validateLink(m)&&(g=_[c].text,g=_[c].schema?"mailto:"!==_[c].schema||/^mailto:/i.test(g)?e.md.normalizeLinkText(g):e.md.normalizeLinkText("mailto:"+g).replace(/^mailto:/,""):e.md.normalizeLinkText("http://"+g).replace(/^http:\/\//,""),(u=_[c].index)>p&&((o=new e.Token("text","",0)).content=l.slice(p,u),o.level=h,a.push(o)),(o=new e.Token("link_open","a",1)).attrs=[["href",m]],o.level=h++,o.markup="linkify",o.info="auto",a.push(o),(o=new e.Token("text","",0)).content=g,o.level=h,a.push(o),(o=new e.Token("link_close","a",-1)).level=--h,o.markup="linkify",o.info="auto",a.push(o),p=_[c].lastIndex);p=0;r--)"inline"===e.tokens[r].type&&($.test(e.tokens[r].content)&&W(e.tokens[r].children),Z.test(e.tokens[r].content)&&Y(e.tokens[r].children))}],["smartquotes",function(e){var r;if(e.md.options.typographer)for(r=e.tokens.length-1;r>=0;r--)"inline"===e.tokens[r].type&&ee.test(e.tokens[r].content)&&ne(e.tokens[r].children,e)}],["text_join",function(e){var 
r,t,n,s,o,i,a=e.tokens;for(r=0,t=a.length;r=o)return-1;if((t=e.src.charCodeAt(s++))<48||t>57)return-1;for(;;){if(s>=o)return-1;if(!((t=e.src.charCodeAt(s++))>=48&&t<=57)){if(41===t||46===t)break;return-1}if(s-n>=10)return-1}return s`\\x00-\\x20]+|'[^']*'|\"[^\"]*\"))?)*\\s*\\/?>",ye="<\\/[A-Za-z][A-Za-z0-9\\-]*\\s*>",Ae={HTML_TAG_RE:new RegExp("^(?:"+Ce+"|"+ye+"|\x3c!----\x3e|\x3c!--(?:-?[^>-])(?:-?[^-])*--\x3e|<[?][\\s\\S]*?[?]>|]*>|)"),HTML_OPEN_CLOSE_TAG_RE:new RegExp("^(?:"+Ce+"|"+ye+")")},xe=Ae.HTML_OPEN_CLOSE_TAG_RE,De=[[/^<(script|pre|style|textarea)(?=(\s|>|$))/i,/<\/(script|pre|style|textarea)>/i,!0],[/^/,!0],[/^<\?/,/\?>/,!0],[/^/,!0],[/^/,!0],[new RegExp("^|$))","i"),/^$/,!0],[new RegExp(xe.source+"\\s*$"),/^$/,!1]],we=w.isSpace,Ee=w.isSpace;function qe(e,r,t,n){var s,o,i,a,c,l,u,p;for(this.src=e,this.md=r,this.env=t,this.tokens=n,this.bMarks=[],this.eMarks=[],this.tShift=[],this.sCount=[],this.bsCount=[],this.blkIndent=0,this.line=0,this.lineMax=0,this.tight=!1,this.ddIndent=-1,this.listIndent=-1,this.parentType="root",this.level=0,this.result="",p=!1,i=a=l=u=0,c=(o=this.src).length;a0&&this.level++,this.tokens.push(n),n},qe.prototype.isEmpty=function(e){return this.bMarks[e]+this.tShift[e]>=this.eMarks[e]},qe.prototype.skipEmptyLines=function(e){for(var r=this.lineMax;er;)if(!Ee(this.src.charCodeAt(--e)))return e+1;return e},qe.prototype.skipChars=function(e,r){for(var t=this.src.length;et;)if(r!==this.src.charCodeAt(--e))return e+1;return e},qe.prototype.getLines=function(e,r,t,n){var s,o,i,a,c,l,u,p=e;if(e>=r)return"";for(l=new Array(r-e),s=0;pt?new Array(o-t+1).join(" ")+this.src.slice(a,c):this.src.slice(a,c)}return l.join("")},qe.prototype.Token=oe;var Se=qe,Fe=[["table",function(e,r,t,n){var 
s,o,i,a,c,l,u,p,h,f,d,m,g,_,k,b,v,C;if(r+2>t)return!1;if(l=r+1,e.sCount[l]=4)return!1;if((i=e.bMarks[l]+e.tShift[l])>=e.eMarks[l])return!1;if(124!==(v=e.src.charCodeAt(i++))&&45!==v&&58!==v)return!1;if(i>=e.eMarks[l])return!1;if(124!==(C=e.src.charCodeAt(i++))&&45!==C&&58!==C&&!pe(C))return!1;if(45===v&&pe(C))return!1;for(;i=4)return!1;if((u=fe(o)).length&&""===u[0]&&u.shift(),u.length&&""===u[u.length-1]&&u.pop(),0===(p=u.length)||p!==f.length)return!1;if(n)return!0;for(_=e.parentType,e.parentType="table",b=e.md.block.ruler.getRules("blockquote"),(h=e.push("table_open","table",1)).map=m=[r,0],(h=e.push("thead_open","thead",1)).map=[r,r+1],(h=e.push("tr_open","tr",1)).map=[r,r+1],a=0;a=4)break;for((u=fe(o)).length&&""===u[0]&&u.shift(),u.length&&""===u[u.length-1]&&u.pop(),l===r+2&&((h=e.push("tbody_open","tbody",1)).map=g=[r+2,0]),(h=e.push("tr_open","tr",1)).map=[l,l+1],a=0;a=4))break;s=++n}return e.line=s,(o=e.push("code_block","code",0)).content=e.getLines(r,s,4+e.blkIndent,!1)+"\n",o.map=[r,e.line],!0}],["fence",function(e,r,t,n){var 
s,o,i,a,c,l,u,p=!1,h=e.bMarks[r]+e.tShift[r],f=e.eMarks[r];if(e.sCount[r]-e.blkIndent>=4)return!1;if(h+3>f)return!1;if(126!==(s=e.src.charCodeAt(h))&&96!==s)return!1;if(c=h,(o=(h=e.skipChars(h,s))-c)<3)return!1;if(u=e.src.slice(c,h),i=e.src.slice(h,f),96===s&&i.indexOf(String.fromCharCode(s))>=0)return!1;if(n)return!0;for(a=r;!(++a>=t)&&!((h=c=e.bMarks[a]+e.tShift[a])<(f=e.eMarks[a])&&e.sCount[a]=4||(h=e.skipChars(h,s))-c=4)return!1;if(62!==e.src.charCodeAt(D++))return!1;if(n)return!0;for(a=h=e.sCount[r]+1,32===e.src.charCodeAt(D)?(D++,a++,h++,s=!1,b=!0):9===e.src.charCodeAt(D)?(b=!0,(e.bsCount[r]+h)%4==3?(D++,a++,h++,s=!1):s=!0):b=!1,f=[e.bMarks[r]],e.bMarks[r]=D;D=w,_=[e.sCount[r]],e.sCount[r]=h-a,k=[e.tShift[r]],e.tShift[r]=D-e.bMarks[r],C=e.md.block.ruler.getRules("blockquote"),g=e.parentType,e.parentType="blockquote",p=r+1;p=(w=e.eMarks[p])));p++)if(62!==e.src.charCodeAt(D++)||A){if(l)break;for(v=!1,i=0,c=C.length;i=w,d.push(e.bsCount[p]),e.bsCount[p]=e.sCount[p]+1+(b?1:0),_.push(e.sCount[p]),e.sCount[p]=h-a,k.push(e.tShift[p]),e.tShift[p]=D-e.bMarks[p]}for(m=e.blkIndent,e.blkIndent=0,(y=e.push("blockquote_open","blockquote",1)).markup=">",y.map=u=[r,0],e.md.block.tokenize(e,r,p),(y=e.push("blockquote_close","blockquote",-1)).markup=">",e.lineMax=x,e.parentType=g,u[1]=e.line,i=0;i=4)return!1;if(42!==(s=e.src.charCodeAt(c++))&&45!==s&&95!==s)return!1;for(o=1;c=4)return!1;if(e.listIndent>=0&&e.sCount[r]-e.listIndent>=4&&e.sCount[r]=e.blkIndent&&(z=!0),(w=ke(e,r))>=0){if(u=!0,q=e.bMarks[r]+e.tShift[r],g=Number(e.src.slice(q,w-1)),z&&1!==g)return!1}else{if(!((w=_e(e,r))>=0))return!1;u=!1}if(z&&e.skipSpaces(w)>=e.eMarks[r])return!1;if(m=e.src.charCodeAt(w-1),n)return!0;for(d=e.tokens.length,u?(L=e.push("ordered_list_open","ol",1),1!==g&&(L.attrs=[["start",g]])):L=e.push("bullet_list_open","ul",1),L.map=f=[r,0],L.markup=String.fromCharCode(m),k=r,E=!1,F=e.md.block.ruler.getRules("list"),C=e.parentType,e.parentType="list";k=_?1:b-l)>4&&(c=1),a=l+c,(L=e.push("list_item
_open","li",1)).markup=String.fromCharCode(m),L.map=p=[r,0],u&&(L.info=e.src.slice(q,w-1)),x=e.tight,A=e.tShift[r],y=e.sCount[r],v=e.listIndent,e.listIndent=e.blkIndent,e.blkIndent=a,e.tight=!0,e.tShift[r]=o-e.bMarks[r],e.sCount[r]=b,o>=_&&e.isEmpty(r+1)?e.line=Math.min(e.line+2,t):e.md.block.tokenize(e,r,t,!0),e.tight&&!E||(T=!1),E=e.line-r>1&&e.isEmpty(e.line-1),e.blkIndent=e.listIndent,e.listIndent=v,e.tShift[r]=A,e.sCount[r]=y,e.tight=x,(L=e.push("list_item_close","li",-1)).markup=String.fromCharCode(m),k=r=e.line,p[1]=k,o=e.bMarks[r],k>=t)break;if(e.sCount[k]=4)break;for(S=!1,i=0,h=F.length;i=4)return!1;if(91!==e.src.charCodeAt(C))return!1;for(;++C3||e.sCount[A]<0)){for(_=!1,l=0,u=k.length;l=4)return!1;if(!e.md.options.html)return!1;if(60!==e.src.charCodeAt(c))return!1;for(a=e.src.slice(c,l),s=0;s=4)return!1;if(35!==(s=e.src.charCodeAt(c))||c>=l)return!1;for(o=1,s=e.src.charCodeAt(++c);35===s&&c6||cc&&we(e.src.charCodeAt(i-1))&&(l=i),e.line=r+1,(a=e.push("heading_open","h"+String(o),1)).markup="########".slice(0,o),a.map=[r,e.line],(a=e.push("inline","",0)).content=e.src.slice(c,l).trim(),a.map=[r,e.line],a.children=[],(a=e.push("heading_close","h"+String(o),-1)).markup="########".slice(0,o)),!0)},["paragraph","reference","blockquote"]],["lheading",function(e,r,t){var n,s,o,i,a,c,l,u,p,h,f=r+1,d=e.md.block.ruler.getRules("paragraph");if(e.sCount[r]-e.blkIndent>=4)return!1;for(h=e.parentType,e.parentType="paragraph";f3)){if(e.sCount[f]>=e.blkIndent&&(c=e.bMarks[f]+e.tShift[f])<(l=e.eMarks[f])&&(45===(p=e.src.charCodeAt(c))||61===p)&&(c=e.skipChars(c,p),(c=e.skipSpaces(c))>=l)){u=61===p?1:2;break}if(!(e.sCount[f]<0)){for(s=!1,o=0,i=d.length;o3||e.sCount[c]<0)){for(n=!1,s=0,o=l.length;s=t))&&!(e.sCount[i]=c){e.line=t;break}for(n=0;n?@[]^_`{|}~-".split("").forEach((function(e){Be[e.charCodeAt(0)]=1}));function Oe(e,r){var 
t,n,s,o,i,a=[],c=r.length;for(t=0;t=0;t--)95!==(n=r[t]).marker&&42!==n.marker||-1!==n.end&&(s=r[n.end],a=t>0&&r[t-1].end===n.end+1&&r[t-1].marker===n.marker&&r[t-1].token===n.token-1&&r[n.end+1].token===s.token+1,i=String.fromCharCode(n.marker),(o=e.tokens[n.token]).type=a?"strong_open":"em_open",o.tag=a?"strong":"em",o.nesting=1,o.markup=a?i+i:i,o.content="",(o=e.tokens[s.token]).type=a?"strong_close":"em_close",o.tag=a?"strong":"em",o.nesting=-1,o.markup=a?i+i:i,o.content="",a&&(e.tokens[r[t-1].token].content="",e.tokens[r[n.end+1].token].content="",t--))}var Ue={tokenize:function(e,r){var t,n,s=e.pos,o=e.src.charCodeAt(s);if(r)return!1;if(95!==o&&42!==o)return!1;for(n=e.scanDelims(e.pos,42===o),t=0;t\x00-\x20]*)$/,We=Ae.HTML_TAG_RE;var Ye=w.has,Ke=w.isValidEntityCode,Qe=w.fromCodePoint,Xe=/^&#((?:x[a-f0-9]{1,6}|[0-9]{1,7}));/i,er=/^&([a-z][a-z0-9]{1,31});/i;function rr(e,r){var t,n,s,o,i,a,c,l,u={},p=r.length;if(p){var h=0,f=-2,d=[];for(t=0;ti;n-=d[n]+1)if((o=r[n]).marker===s.marker&&o.open&&o.end<0&&(c=!1,(o.close||s.open)&&(o.length+s.length)%3==0&&(o.length%3==0&&s.length%3==0||(c=!0)),!c)){l=n>0&&!r[n-1].open?d[n-1]+1:0,d[t]=t-n+l,d[n]=l,s.open=!1,o.end=t,o.close=!1,a=-1,f=-2;break}-1!==a&&(u[s.marker][(s.open?3:0)+(s.length||0)%3]=a)}}}var tr=w.isWhiteSpace,nr=w.isPunctChar,sr=w.isMdAsciiPunct;function or(e,r,t,n){this.src=e,this.env=t,this.md=r,this.tokens=n,this.tokens_meta=Array(n.length),this.pos=0,this.posMax=this.src.length,this.level=0,this.pending="",this.pendingLevel=0,this.cache={},this.delimiters=[],this._prev_delimiters=[],this.backticks={},this.backticksScanned=!1,this.linkLevel=0}or.prototype.pushPending=function(){var e=new oe("text","",0);return e.content=this.pending,e.level=this.pendingLevel,this.tokens.push(e),this.pending="",e},or.prototype.push=function(e,r,t){this.pending&&this.pushPending();var n=new oe(e,r,t),s=null;return 
t<0&&(this.level--,this.delimiters=this._prev_delimiters.pop()),n.level=this.level,t>0&&(this.level++,this._prev_delimiters.push(this.delimiters),this.delimiters=[],s={delimiters:this.delimiters}),this.pendingLevel=this.level,this.tokens.push(n),this.tokens_meta.push(s),n},or.prototype.scanDelims=function(e,r){var t,n,s,o,i,a,c,l,u,p=e,h=!0,f=!0,d=this.posMax,m=this.src.charCodeAt(e);for(t=e>0?this.src.charCodeAt(e-1):32;p0)&&(!((t=e.pos)+3>e.posMax)&&(58===e.src.charCodeAt(t)&&(47===e.src.charCodeAt(t+1)&&(47===e.src.charCodeAt(t+2)&&(!!(n=e.pending.match(Ie))&&(s=n[1],!!(o=e.md.linkify.matchAtStart(e.src.slice(t-s.length)))&&(i=(i=o.url).replace(/\*+$/,""),a=e.md.normalizeLink(i),!!e.md.validateLink(a)&&(r||(e.pending=e.pending.slice(0,-s.length),(c=e.push("link_open","a",1)).attrs=[["href",a]],c.markup="linkify",c.info="auto",(c=e.push("text","",0)).content=e.md.normalizeLinkText(i),(c=e.push("link_close","a",-1)).markup="linkify",c.info="auto"),e.pos+=i.length-s.length,!0)))))))))}],["newline",function(e,r){var t,n,s,o=e.pos;if(10!==e.src.charCodeAt(o))return!1;if(t=e.pending.length-1,n=e.posMax,!r)if(t>=0&&32===e.pending.charCodeAt(t))if(t>=1&&32===e.pending.charCodeAt(t-1)){for(s=t-1;s>=1&&32===e.pending.charCodeAt(s-1);)s--;e.pending=e.pending.slice(0,s),e.push("hardbreak","br",0)}else e.pending=e.pending.slice(0,-1),e.push("softbreak","br",0);else e.push("softbreak","br",0);for(o++;o=c)return!1;if(10===(t=e.src.charCodeAt(a))){for(r||e.push("hardbreak","br",0),a++;a=55296&&t<=56319&&a+1=56320&&n<=57343&&(o+=e.src[a+1],a++),s="\\"+o,r||(i=e.push("text_special","",0),t<256&&0!==Be[t]?i.content=o:i.content=s,i.markup=s,i.info="escape"),e.pos=a+1,!0}],["backticks",function(e,r){var 
t,n,s,o,i,a,c,l,u=e.pos;if(96!==e.src.charCodeAt(u))return!1;for(t=u,u++,n=e.posMax;u=f)return!1;if(d=a,(c=e.md.helpers.parseLinkDestination(e.src,a,e.posMax)).ok){for(u=e.md.normalizeLink(c.str),e.md.validateLink(u)?a=c.pos:u="",d=a;a=f||41!==e.src.charCodeAt(a))&&(m=!0),a++}if(m){if(void 0===e.env.references)return!1;if(a=0?s=e.src.slice(d,a++):a=o+1):a=o+1,s||(s=e.src.slice(i,o)),!(l=e.env.references[Ve(s)]))return e.pos=h,!1;u=l.href,p=l.title}return r||(e.pos=i,e.posMax=o,e.push("link_open","a",1).attrs=t=[["href",u]],p&&t.push(["title",p]),e.linkLevel++,e.md.inline.tokenize(e),e.linkLevel--,e.push("link_close","a",-1)),e.pos=a,e.posMax=f,!0}],["image",function(e,r){var t,n,s,o,i,a,c,l,u,p,h,f,d,m="",g=e.pos,_=e.posMax;if(33!==e.src.charCodeAt(e.pos))return!1;if(91!==e.src.charCodeAt(e.pos+1))return!1;if(a=e.pos+2,(i=e.md.helpers.parseLinkLabel(e,e.pos+1,!1))<0)return!1;if((c=i+1)<_&&40===e.src.charCodeAt(c)){for(c++;c<_&&(n=e.src.charCodeAt(c),Ge(n)||10===n);c++);if(c>=_)return!1;for(d=c,(u=e.md.helpers.parseLinkDestination(e.src,c,e.posMax)).ok&&(m=e.md.normalizeLink(u.str),e.md.validateLink(m)?c=u.pos:m=""),d=c;c<_&&(n=e.src.charCodeAt(c),Ge(n)||10===n);c++);if(u=e.md.helpers.parseLinkTitle(e.src,c,e.posMax),c<_&&d!==c&&u.ok)for(p=u.str,c=u.pos;c<_&&(n=e.src.charCodeAt(c),Ge(n)||10===n);c++);else p="";if(c>=_||41!==e.src.charCodeAt(c))return e.pos=g,!1;c++}else{if(void 0===e.env.references)return!1;if(c<_&&91===e.src.charCodeAt(c)?(d=c+1,(c=e.md.helpers.parseLinkLabel(e,c))>=0?o=e.src.slice(d,c++):c=i+1):c=i+1,o||(o=e.src.slice(a,i)),!(l=e.env.references[$e(o)]))return e.pos=g,!1;m=l.href,p=l.title}return r||(s=e.src.slice(a,i),e.md.inline.parse(s,e.md,e.env,f=[]),(h=e.push("image","img",0)).attrs=t=[["src",m],["alt",""]],h.children=f,h.content=s,p&&t.push(["title",p])),e.pos=c,e.posMax=_,!0}],["autolink",function(e,r){var 
t,n,s,o,i,a,c=e.pos;if(60!==e.src.charCodeAt(c))return!1;for(i=e.pos,a=e.posMax;;){if(++c>=a)return!1;if(60===(o=e.src.charCodeAt(c)))return!1;if(62===o)break}return t=e.src.slice(i+1,c),Je.test(t)?(n=e.md.normalizeLink(t),!!e.md.validateLink(n)&&(r||((s=e.push("link_open","a",1)).attrs=[["href",n]],s.markup="autolink",s.info="auto",(s=e.push("text","",0)).content=e.md.normalizeLinkText(t),(s=e.push("link_close","a",-1)).markup="autolink",s.info="auto"),e.pos+=t.length+2,!0)):!!He.test(t)&&(n=e.md.normalizeLink("mailto:"+t),!!e.md.validateLink(n)&&(r||((s=e.push("link_open","a",1)).attrs=[["href",n]],s.markup="autolink",s.info="auto",(s=e.push("text","",0)).content=e.md.normalizeLinkText(t),(s=e.push("link_close","a",-1)).markup="autolink",s.info="auto"),e.pos+=t.length+2,!0))}],["html_inline",function(e,r){var t,n,s,o,i,a=e.pos;return!!e.md.options.html&&(s=e.posMax,!(60!==e.src.charCodeAt(a)||a+2>=s)&&(!(33!==(t=e.src.charCodeAt(a+1))&&63!==t&&47!==t&&!function(e){var r=32|e;return r>=97&&r<=122}(t))&&(!!(n=e.src.slice(a).match(We))&&(r||((o=e.push("html_inline","",0)).content=e.src.slice(a,a+n[0].length),i=o.content,/^\s]/i.test(i)&&e.linkLevel++,function(e){return/^<\/a\s*>/i.test(e)}(o.content)&&e.linkLevel--),e.pos+=n[0].length,!0))))}],["entity",function(e,t){var n,s,o,i=e.pos,a=e.posMax;if(38!==e.src.charCodeAt(i))return!1;if(i+1>=a)return!1;if(35===e.src.charCodeAt(i+1)){if(s=e.src.slice(i).match(Xe))return t||(n="x"===s[1][0].toLowerCase()?parseInt(s[1].slice(1),16):parseInt(s[1],10),(o=e.push("text_special","",0)).content=Ke(n)?Qe(n):Qe(65533),o.markup=s[0],o.info="entity"),e.pos+=s[0].length,!0}else if((s=e.src.slice(i).match(er))&&Ye(r,s[1]))return t||((o=e.push("text_special","",0)).content=r[s[1]],o.markup=s[0],o.info="entity"),e.pos+=s[0].length,!0;return!1}]],cr=[["balance_pairs",function(e){var r,t=e.tokens_meta,n=e.tokens_meta.length;for(rr(0,e.delimiters),r=0;r0&&n++,"text"===s[r].type&&r+1=o)break}else 
e.pending+=e.src[e.pos++]}e.pending&&e.pushPending()},lr.prototype.parse=function(e,r,t,n){var s,o,i,a=new this.State(e,r,t,n);for(this.tokenize(a),i=(o=this.ruler2.getRules("")).length,s=0;s=3&&":"===e[r-3]||r>=3&&"/"===e[r-3]?0:n.match(t.re.no_http)[0].length:0}},"mailto:":{validate:function(e,r,t){var n=e.slice(r);return t.re.mailto||(t.re.mailto=new RegExp("^"+t.re.src_email_name+"@"+t.re.src_host_strict,"i")),t.re.mailto.test(n)?n.match(t.re.mailto)[0].length:0}}},_r="biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|\u0440\u0444".split("|");function kr(e){var r=e.re=function(e){var r={};return e=e||{},r.src_Any=y.source,r.src_Cc=A.source,r.src_Z=x.source,r.src_P=t.source,r.src_ZPCc=[r.src_Z,r.src_P,r.src_Cc].join("|"),r.src_ZCc=[r.src_Z,r.src_Cc].join("|"),r.src_pseudo_letter="(?:(?![><\uff5c]|"+r.src_ZPCc+")"+r.src_Any+")",r.src_ip4="(?:(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)",r.src_auth="(?:(?:(?!"+r.src_ZCc+"|[@/\\[\\]()]).)+@)?",r.src_port="(?::(?:6(?:[0-4]\\d{3}|5(?:[0-4]\\d{2}|5(?:[0-2]\\d|3[0-5])))|[1-5]?\\d{1,4}))?",r.src_host_terminator="(?=$|[><\uff5c]|"+r.src_ZPCc+")(?!"+(e["---"]?"-(?!--)|":"-|")+"_|:\\d|\\.-|\\.(?!$|"+r.src_ZPCc+"))",r.src_path="(?:[/?#](?:(?!"+r.src_ZCc+"|[><\uff5c]|[()[\\]{}.,\"'?!\\-;]).|\\[(?:(?!"+r.src_ZCc+"|\\]).)*\\]|\\((?:(?!"+r.src_ZCc+"|[)]).)*\\)|\\{(?:(?!"+r.src_ZCc+'|[}]).)*\\}|\\"(?:(?!'+r.src_ZCc+'|["]).)+\\"|\\\'(?:(?!'+r.src_ZCc+"|[']).)+\\'|\\'(?="+r.src_pseudo_letter+"|[-])|\\.{2,}[a-zA-Z0-9%/&]|\\.(?!"+r.src_ZCc+"|[.]|$)|"+(e["---"]?"\\-(?!--(?:[^-]|$))(?:-*)|":"\\-+|")+",(?!"+r.src_ZCc+"|$)|;(?!"+r.src_ZCc+"|$)|\\!+(?!"+r.src_ZCc+"|[!]|$)|\\?(?!"+r.src_ZCc+"|[?]|$))+|\\/)?",r.src_email_name='[\\-;:&=\\+\\$,\\.a-zA-Z0-9_][\\-;:&=\\+\\$,\\"\\.a-zA-Z0-9_]*',r.src_xn="xn--[a-z0-9\\-]{1,59}",r.src_domain_root="(?:"+r.src_xn+"|"+r.src_pseudo_letter+"{1,63})",r.src_domain="(?:"+r.src_xn+"|(?:"+r.src_pseudo_letter+")|(?:"+r.src_pseudo_letter+"(?:-|"+r.
src_pseudo_letter+"){0,61}"+r.src_pseudo_letter+"))",r.src_host="(?:(?:(?:(?:"+r.src_domain+")\\.)*"+r.src_domain+"))",r.tpl_host_fuzzy="(?:"+r.src_ip4+"|(?:(?:(?:"+r.src_domain+")\\.)+(?:%TLDS%)))",r.tpl_host_no_ip_fuzzy="(?:(?:(?:"+r.src_domain+")\\.)+(?:%TLDS%))",r.src_host_strict=r.src_host+r.src_host_terminator,r.tpl_host_fuzzy_strict=r.tpl_host_fuzzy+r.src_host_terminator,r.src_host_port_strict=r.src_host+r.src_port+r.src_host_terminator,r.tpl_host_port_fuzzy_strict=r.tpl_host_fuzzy+r.src_port+r.src_host_terminator,r.tpl_host_port_no_ip_fuzzy_strict=r.tpl_host_no_ip_fuzzy+r.src_port+r.src_host_terminator,r.tpl_host_fuzzy_test="localhost|www\\.|\\.\\d{1,3}\\.|(?:\\.(?:%TLDS%)(?:"+r.src_ZPCc+"|>|$))",r.tpl_email_fuzzy='(^|[><\uff5c]|"|\\(|'+r.src_ZCc+")("+r.src_email_name+"@"+r.tpl_host_fuzzy_strict+")",r.tpl_link_fuzzy="(^|(?![.:/\\-_@])(?:[$+<=>^`|\uff5c]|"+r.src_ZPCc+"))((?![$+<=>^`|\uff5c])"+r.tpl_host_port_fuzzy_strict+r.src_path+")",r.tpl_link_no_ip_fuzzy="(^|(?![.:/\\-_@])(?:[$+<=>^`|\uff5c]|"+r.src_ZPCc+"))((?![$+<=>^`|\uff5c])"+r.tpl_host_port_no_ip_fuzzy_strict+r.src_path+")",r}(e.__opts__),n=e.__tlds__.slice();function s(e){return e.replace("%TLDS%",r.src_tlds)}e.onCompile(),e.__tlds_replaced__||n.push("a[cdefgilmnoqrstuwxz]|b[abdefghijmnorstvwyz]|c[acdfghiklmnoruvwxyz]|d[ejkmoz]|e[cegrstu]|f[ijkmor]|g[abdefghilmnpqrstuwy]|h[kmnrtu]|i[delmnoqrst]|j[emop]|k[eghimnprwyz]|l[abcikrstuvy]|m[acdeghklmnopqrstuvwxyz]|n[acefgilopruz]|om|p[aefghklmnrstwy]|qa|r[eosuw]|s[abcdeghijklmnortuvxyz]|t[cdfghjklmnortvwz]|u[agksyz]|v[aceginu]|w[fs]|y[et]|z[amw]"),n.push(r.src_xn),r.src_tlds=n.join("|"),r.email_fuzzy=RegExp(s(r.tpl_email_fuzzy),"i"),r.link_fuzzy=RegExp(s(r.tpl_link_fuzzy),"i"),r.link_no_ip_fuzzy=RegExp(s(r.tpl_link_no_ip_fuzzy),"i"),r.host_fuzzy_test=RegExp(s(r.tpl_host_fuzzy_test),"i");var o=[];function i(e,r){throw new Error('(LinkifyIt) Invalid schema "'+e+'": '+r)}e.__compiled__={},Object.keys(e.__schemas__).forEach((function(r){var 
t=e.__schemas__[r];if(null!==t){var n={validate:null,link:null};if(e.__compiled__[r]=n,"[object Object]"===hr(t))return!function(e){return"[object RegExp]"===hr(e)}(t.validate)?fr(t.validate)?n.validate=t.validate:i(r,t):n.validate=function(e){return function(r,t){var n=r.slice(t);return e.test(n)?n.match(e)[0].length:0}}(t.validate),void(fr(t.normalize)?n.normalize=t.normalize:t.normalize?i(r,t):n.normalize=function(e,r){r.normalize(e)});!function(e){return"[object String]"===hr(e)}(t)?i(r,t):o.push(r)}})),o.forEach((function(r){e.__compiled__[e.__schemas__[r]]&&(e.__compiled__[r].validate=e.__compiled__[e.__schemas__[r]].validate,e.__compiled__[r].normalize=e.__compiled__[e.__schemas__[r]].normalize)})),e.__compiled__[""]={validate:null,normalize:function(e,r){r.normalize(e)}};var a=Object.keys(e.__compiled__).filter((function(r){return r.length>0&&e.__compiled__[r]})).map(dr).join("|");e.re.schema_test=RegExp("(^|(?!_)(?:[><\uff5c]|"+r.src_ZPCc+"))("+a+")","i"),e.re.schema_search=RegExp("(^|(?!_)(?:[><\uff5c]|"+r.src_ZPCc+"))("+a+")","ig"),e.re.schema_at_start=RegExp("^"+e.re.schema_search.source,"i"),e.re.pretest=RegExp("("+e.re.schema_test.source+")|("+e.re.host_fuzzy_test.source+")|@","i"),function(e){e.__index__=-1,e.__text_cache__=""}(e)}function br(e,r){var t=e.__index__,n=e.__last_index__,s=e.__text_cache__.slice(t,n);this.schema=e.__schema__.toLowerCase(),this.index=t+r,this.lastIndex=n+r,this.raw=s,this.text=s,this.url=s}function vr(e,r){var t=new br(e,r);return e.__compiled__[t.schema].normalize(t,e),t}function Cr(e,r){if(!(this instanceof Cr))return new Cr(e,r);var t;r||(t=e,Object.keys(t||{}).reduce((function(e,r){return e||mr.hasOwnProperty(r)}),!1)&&(r=e,e={})),this.__opts__=pr({},mr,r),this.__index__=-1,this.__last_index__=-1,this.__schema__="",this.__text_cache__="",this.__schemas__=pr({},gr,e),this.__compiled__={},this.__tlds__=_r,this.__tlds_replaced__=!1,this.re={},kr(this)}Cr.prototype.add=function(e,r){return 
this.__schemas__[e]=r,kr(this),this},Cr.prototype.set=function(e){return this.__opts__=pr(this.__opts__,e),this},Cr.prototype.test=function(e){if(this.__text_cache__=e,this.__index__=-1,!e.length)return!1;var r,t,n,s,o,i,a,c;if(this.re.schema_test.test(e))for((a=this.re.schema_search).lastIndex=0;null!==(r=a.exec(e));)if(s=this.testSchemaAt(e,r[2],a.lastIndex)){this.__schema__=r[2],this.__index__=r.index+r[1].length,this.__last_index__=r.index+r[0].length+s;break}return this.__opts__.fuzzyLink&&this.__compiled__["http:"]&&(c=e.search(this.re.host_fuzzy_test))>=0&&(this.__index__<0||c=0&&null!==(n=e.match(this.re.email_fuzzy))&&(o=n.index+n[1].length,i=n.index+n[0].length,(this.__index__<0||othis.__last_index__)&&(this.__schema__="mailto:",this.__index__=o,this.__last_index__=i)),this.__index__>=0},Cr.prototype.pretest=function(e){return this.re.pretest.test(e)},Cr.prototype.testSchemaAt=function(e,r,t){return this.__compiled__[r.toLowerCase()]?this.__compiled__[r.toLowerCase()].validate(e,t,this):0},Cr.prototype.match=function(e){var r=0,t=[];this.__index__>=0&&this.__text_cache__===e&&(t.push(vr(this,r)),r=this.__last_index__);for(var n=r?e.slice(r):e;this.test(n);)t.push(vr(this,r)),n=n.slice(this.__last_index__),r+=this.__last_index__;return t.length?t:null},Cr.prototype.matchAtStart=function(e){if(this.__text_cache__=e,this.__index__=-1,!e.length)return null;var r=this.re.schema_at_start.exec(e);if(!r)return null;var t=this.testSchemaAt(e,r[2],r[0].length);return t?(this.__schema__=r[2],this.__index__=r.index+r[1].length,this.__last_index__=r.index+r[0].length+t,vr(this,0)):null},Cr.prototype.tlds=function(e,r){return e=Array.isArray(e)?e:[e],r?(this.__tlds__=this.__tlds__.concat(e).sort().filter((function(e,r,t){return 
e!==t[r-1]})).reverse(),kr(this),this):(this.__tlds__=e.slice(),this.__tlds_replaced__=!0,kr(this),this)},Cr.prototype.normalize=function(e){e.schema||(e.url="http://"+e.url),"mailto:"!==e.schema||/^mailto:/i.test(e.url)||(e.url="mailto:"+e.url)},Cr.prototype.onCompile=function(){};var yr=Cr,Ar=2147483647,xr=36,Dr=/^xn--/,wr=/[^\x20-\x7E]/,Er=/[\x2E\u3002\uFF0E\uFF61]/g,qr={overflow:"Overflow: input needs wider integers to process","not-basic":"Illegal input >= 0x80 (not a basic code point)","invalid-input":"Invalid input"},Sr=Math.floor,Fr=String.fromCharCode; +/*! https://mths.be/punycode v1.4.1 by @mathias */function Lr(e){throw new RangeError(qr[e])}function zr(e,r){for(var t=e.length,n=[];t--;)n[t]=r(e[t]);return n}function Tr(e,r){var t=e.split("@"),n="";return t.length>1&&(n=t[0]+"@",e=t[1]),n+zr((e=e.replace(Er,".")).split("."),r).join(".")}function Ir(e){for(var r,t,n=[],s=0,o=e.length;s=55296&&r<=56319&&s65535&&(r+=Fr((e-=65536)>>>10&1023|55296),e=56320|1023&e),r+=Fr(e)})).join("")}function Rr(e,r){return e+22+75*(e<26)-((0!=r)<<5)}function Br(e,r,t){var n=0;for(e=t?Sr(e/700):e>>1,e+=Sr(e/r);e>455;n+=xr)e=Sr(e/35);return Sr(n+36*e/(e+38))}function Nr(e){var r,t,n,s,o,i,a,c,l,u,p,h=[],f=e.length,d=0,m=128,g=72;for((t=e.lastIndexOf("-"))<0&&(t=0),n=0;n=128&&Lr("not-basic"),h.push(e.charCodeAt(n));for(s=t>0?t+1:0;s=f&&Lr("invalid-input"),((c=(p=e.charCodeAt(s++))-48<10?p-22:p-65<26?p-65:p-97<26?p-97:xr)>=xr||c>Sr((Ar-d)/i))&&Lr("overflow"),d+=c*i,!(c<(l=a<=g?1:a>=g+26?26:a-g));a+=xr)i>Sr(Ar/(u=xr-l))&&Lr("overflow"),i*=u;g=Br(d-o,r=h.length+1,0==o),Sr(d/r)>Ar-m&&Lr("overflow"),m+=Sr(d/r),d%=r,h.splice(d++,0,m)}return Mr(h)}function Or(e){var r,t,n,s,o,i,a,c,l,u,p,h,f,d,m,g=[];for(h=(e=Ir(e)).length,r=128,t=0,o=72,i=0;i=r&&pSr((Ar-t)/(f=n+1))&&Lr("overflow"),t+=(a-r)*f,r=a,i=0;iAr&&Lr("overflow"),p==r){for(c=t,l=xr;!(c<(u=l<=o?1:l>=o+26?26:l-o));l+=xr)m=c-u,d=xr-u,g.push(Fr(Rr(u+m%d,0))),c=Sr(m/d);g.push(Fr(Rr(c,0))),o=Br(t,f,n==s),t=0,++n}++t,++r}return 
g.join("")}function Pr(e){return Tr(e,(function(e){return Dr.test(e)?Nr(e.slice(4).toLowerCase()):e}))}function jr(e){return Tr(e,(function(e){return wr.test(e)?"xn--"+Or(e):e}))}var Ur="1.4.1",Vr={decode:Ir,encode:Mr},Zr={version:Ur,ucs2:Vr,toASCII:jr,toUnicode:Pr,encode:Or,decode:Nr},$r=e(Object.freeze({__proto__:null,decode:Nr,encode:Or,toUnicode:Pr,toASCII:jr,version:Ur,ucs2:Vr,default:Zr})),Gr={default:{options:{html:!1,xhtmlOut:!1,breaks:!1,langPrefix:"language-",linkify:!1,typographer:!1,quotes:"\u201c\u201d\u2018\u2019",highlight:null,maxNesting:100},components:{core:{},block:{},inline:{}}},zero:{options:{html:!1,xhtmlOut:!1,breaks:!1,langPrefix:"language-",linkify:!1,typographer:!1,quotes:"\u201c\u201d\u2018\u2019",highlight:null,maxNesting:20},components:{core:{rules:["normalize","block","inline","text_join"]},block:{rules:["paragraph"]},inline:{rules:["text"],rules2:["balance_pairs","fragments_join"]}}},commonmark:{options:{html:!0,xhtmlOut:!0,breaks:!1,langPrefix:"language-",linkify:!1,typographer:!1,quotes:"\u201c\u201d\u2018\u2019",highlight:null,maxNesting:20},components:{core:{rules:["normalize","block","inline","text_join"]},block:{rules:["blockquote","code","fence","heading","hr","html_block","lheading","list","reference","paragraph"]},inline:{rules:["autolink","backticks","emphasis","entity","escape","html_inline","image","link","newline","text"],rules2:["balance_pairs","emphasis","fragments_join"]}}}},Hr=/^(vbscript|javascript|file|data):/,Jr=/^data:image\/(gif|png|jpeg|webp);/;function Wr(e){var r=e.trim().toLowerCase();return!Hr.test(r)||!!Jr.test(r)}var Yr=["http:","https:","mailto:"];function Kr(e){var r=C.parse(e,!0);if(r.hostname&&(!r.protocol||Yr.indexOf(r.protocol)>=0))try{r.hostname=$r.toASCII(r.hostname)}catch(e){}return C.encode(C.format(r))}function Qr(e){var r=C.parse(e,!0);if(r.hostname&&(!r.protocol||Yr.indexOf(r.protocol)>=0))try{r.hostname=$r.toUnicode(r.hostname)}catch(e){}return 
C.decode(C.format(r),C.decode.defaultChars+"%")}function Xr(e,r){if(!(this instanceof Xr))return new Xr(e,r);r||w.isString(e)||(r=e||{},e="default"),this.inline=new ur,this.block=new ze,this.core=new ue,this.renderer=new B,this.linkify=new yr,this.validateLink=Wr,this.normalizeLink=Kr,this.normalizeLinkText=Qr,this.utils=w,this.helpers=w.assign({},L),this.options={},this.configure(e),r&&this.set(r)}return Xr.prototype.set=function(e){return w.assign(this.options,e),this},Xr.prototype.configure=function(e){var r,t=this;if(w.isString(e)&&!(e=Gr[r=e]))throw new Error('Wrong `markdown-it` preset "'+r+'", check name');if(!e)throw new Error("Wrong `markdown-it` preset, can't be empty");return e.options&&t.set(e.options),e.components&&Object.keys(e.components).forEach((function(r){e.components[r].rules&&t[r].ruler.enableOnly(e.components[r].rules),e.components[r].rules2&&t[r].ruler2.enableOnly(e.components[r].rules2)})),this},Xr.prototype.enable=function(e,r){var t=[];Array.isArray(e)||(e=[e]),["core","block","inline"].forEach((function(r){t=t.concat(this[r].ruler.enable(e,!0))}),this),t=t.concat(this.inline.ruler2.enable(e,!0));var n=e.filter((function(e){return t.indexOf(e)<0}));if(n.length&&!r)throw new Error("MarkdownIt. Failed to enable unknown rule(s): "+n);return this},Xr.prototype.disable=function(e,r){var t=[];Array.isArray(e)||(e=[e]),["core","block","inline"].forEach((function(r){t=t.concat(this[r].ruler.disable(e,!0))}),this),t=t.concat(this.inline.ruler2.disable(e,!0));var n=e.filter((function(e){return t.indexOf(e)<0}));if(n.length&&!r)throw new Error("MarkdownIt. 
Failed to disable unknown rule(s): "+n);return this},Xr.prototype.use=function(e){var r=[this].concat(Array.prototype.slice.call(arguments,1));return e.apply(e,r),this},Xr.prototype.parse=function(e,r){if("string"!=typeof e)throw new Error("Input data should be a String");var t=new this.core.State(e,this,r);return this.core.process(t),t.tokens},Xr.prototype.render=function(e,r){return r=r||{},this.renderer.render(this.parse(e,r),this.options,r)},Xr.prototype.parseInline=function(e,r){var t=new this.core.State(e,this,r);return t.inlineMode=!0,this.core.process(t),t.tokens},Xr.prototype.renderInline=function(e,r){return r=r||{},this.renderer.render(this.parseInline(e,r),this.options,r)},Xr})); diff --git a/qubership-apihub-service/static/templates/single_page.html b/qubership-apihub-service/static/templates/single_page.html new file mode 100644 index 0000000..addbfeb --- /dev/null +++ b/qubership-apihub-service/static/templates/single_page.html @@ -0,0 +1,39 @@ + + + + + %s + + + + + +
    + +
    +

    %s

    +

    Version: %s

    +
    +
    + +
    +
    + +
    + +
    + + diff --git a/qubership-apihub-service/tests/ValidatePackageOperations_test.go b/qubership-apihub-service/tests/ValidatePackageOperations_test.go new file mode 100644 index 0000000..d59b27b --- /dev/null +++ b/qubership-apihub-service/tests/ValidatePackageOperations_test.go @@ -0,0 +1,98 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package tests + +import ( + "testing" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" +) + +func TestValidateObjectErrors(t *testing.T) { + var updateOperationGroupReqNil view.UpdateOperationGroupReq + var groupOperationsNil *[]view.GroupOperations = nil + updateOperationGroupReqNil.Operations = groupOperationsNil + updateOperationGroupReqExpectedErrorNil := "Required parameters are missing: [0]operations.operationId, [1]operations.operationId" + if err := utils.ValidateObject(updateOperationGroupReqNil); err != nil { + if updateOperationGroupReqExpectedErrorNil != err.Error() { + t.Fatalf("UpdateOperationGroupReq Validation errors test is failed. 
Actual error: %v", err.Error()) + } + } + + var updateOperationGroupReq view.UpdateOperationGroupReq + var groupOperations = make([]view.GroupOperations, 2) + updateOperationGroupReq.Operations = &groupOperations + updateOperationGroupReqExpectedError := "Required parameters are missing: operations[0].operationId, operations[1].operationId" + if err := utils.ValidateObject(updateOperationGroupReq); err != nil { + if updateOperationGroupReqExpectedError != err.Error() { + t.Fatalf("UpdateOperationGroupReq Validation errors test is failed. Actual error: %v", err.Error()) + } + } + + var packageOperationsFile view.PackageOperationsFile + var operations = make([]view.Operation, 2) + packageOperationsFile.Operations = operations + packageOperationsFileExpectedError := "Required parameters are missing: operations[0].operationId, operations[0].title, operations[0].apiType, operations[0].dataHash, operations[0].apiKind, operations[0].metadata, operations[0].searchScopes, operations[0].apiAudience, operations[1].operationId, operations[1].title, operations[1].apiType, operations[1].dataHash, operations[1].apiKind, operations[1].metadata, operations[1].searchScopes, operations[1].apiAudience" + if err := utils.ValidateObject(packageOperationsFile); err != nil { + if packageOperationsFileExpectedError != err.Error() { + t.Fatalf("Package Operations File Validation errors test is failed. Actual error: %v", err.Error()) + } + } + + var packageInfoFile view.PackageInfoFile + info := view.MakeChangelogInfoFileView(packageInfoFile) + packageInfoFileExpectedError := "Required parameters are missing: packageId, version, previousVersionPackageId, previousVersion" + if err := utils.ValidateObject(info); err != nil { + if packageInfoFileExpectedError != err.Error() { + t.Fatalf("Package Info File Validation errors test is failed. 
Actual error: %v", err.Error()) + } + } + + var packageComparisonsFile view.PackageComparisonsFile + var versionComparison = make([]view.VersionComparison, 2) + var operationTypes = make([]view.OperationType, 2) + versionComparison[0].OperationTypes = operationTypes + versionComparison[1].OperationTypes = operationTypes + packageComparisonsFile.Comparisons = versionComparison + packageComparisonsFileExpectedError := "Required parameters are missing: comparisons[0].operationTypes[0].apiType, comparisons[0].operationTypes[1].apiType, comparisons[1].operationTypes[0].apiType, comparisons[1].operationTypes[1].apiType" + if err := utils.ValidateObject(packageComparisonsFile); err != nil { + if packageComparisonsFileExpectedError != err.Error() { + t.Fatalf("Package Comparisons File Validation errors test is failed. Actual error: %v", err.Error()) + } + } + + var builderNotificationsFile view.BuilderNotificationsFile + var builderNotification = make([]view.BuilderNotification, 2) + builderNotificationsFile.Notifications = builderNotification + //no required params. empty error expected + builderNotificationsFileExpectedError := "" + if err := utils.ValidateObject(builderNotificationsFile); err != nil { + if builderNotificationsFileExpectedError != err.Error() { + t.Fatalf("Builder Notifications File Validation errors test is failed. 
Actual error: %v", err.Error()) + } + } + + var packageDocumentsFile view.PackageDocumentsFile + var packageDocument = make([]view.PackageDocument, 2) + packageDocumentsFile.Documents = packageDocument + packageDocumentsFileExpectedError := "Required parameters are missing: documents[0].fileId, documents[0].type, documents[0].slug, documents[0].title, documents[0].operationIds, documents[0].filename, documents[1].fileId, documents[1].type, documents[1].slug, documents[1].title, documents[1].operationIds, documents[1].filename" + if err := utils.ValidateObject(packageDocumentsFile); err != nil { + if packageDocumentsFileExpectedError != err.Error() { + t.Fatalf("Package Documents File Validation errors test is failed. Actual error: %v", err.Error()) + } + } + +} diff --git a/qubership-apihub-service/utils/ArrayUtils.go b/qubership-apihub-service/utils/ArrayUtils.go new file mode 100644 index 0000000..d2cba01 --- /dev/null +++ b/qubership-apihub-service/utils/ArrayUtils.go @@ -0,0 +1,45 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package utils + +func SliceIndex(slice []string, val string) int { + for i, v := range slice { + if v == val { + return i + } + } + return -1 +} + +func SliceContains(slice []string, val string) bool { + for _, v := range slice { + if v == val { + return true + } + } + return false +} + +func UniqueSet(slice []string) []string { + set := map[string]bool{} + for _, v := range slice { + set[v] = true + } + result := []string{} + for key := range set { + result = append(result, key) + } + return result +} diff --git a/qubership-apihub-service/utils/GoRoutines.go b/qubership-apihub-service/utils/GoRoutines.go new file mode 100644 index 0000000..557cb7d --- /dev/null +++ b/qubership-apihub-service/utils/GoRoutines.go @@ -0,0 +1,40 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package utils + +import ( + log "github.com/sirupsen/logrus" + "runtime/debug" +) + +type noPanicFunc func() + +func (f noPanicFunc) run() { + defer internalRecover() + f() +} + +func SafeAsync(function noPanicFunc) { + go function.run() +} + +func internalRecover() { + if err := recover(); err != nil { + log.Errorf("Request failed with panic: %v", err) + log.Tracef("Stacktrace: %v", string(debug.Stack())) + debug.PrintStack() + return + } +} diff --git a/qubership-apihub-service/utils/HashUtils.go b/qubership-apihub-service/utils/HashUtils.go new file mode 100644 index 0000000..9729d91 --- /dev/null +++ b/qubership-apihub-service/utils/HashUtils.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import ( + "crypto/md5" + "encoding/hex" +) + +func GetEncodedChecksum(data ...[]byte) string { + allData := []byte{} + for _, bytes := range data { + allData = append(allData, bytes...) + } + sum := md5.Sum(allData) + return hex.EncodeToString(sum[:]) +} diff --git a/qubership-apihub-service/utils/PGUtils.go b/qubership-apihub-service/utils/PGUtils.go new file mode 100644 index 0000000..4d9f8ae --- /dev/null +++ b/qubership-apihub-service/utils/PGUtils.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import "strings" + +func LikeEscaped(s string) string { + s = strings.Replace(s, "\\", "\\\\\\\\", -1) + s = strings.Replace(s, "%", "\\%", -1) + s = strings.Replace(s, "_", "\\_", -1) + return s +} diff --git a/qubership-apihub-service/utils/PackageUtils.go b/qubership-apihub-service/utils/PackageUtils.go new file mode 100644 index 0000000..2f33138 --- /dev/null +++ b/qubership-apihub-service/utils/PackageUtils.go @@ -0,0 +1,21 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package utils + +import "strings" + +func GetPackageWorkspaceId(packageId string) string { + return strings.SplitN(packageId, ".", 2)[0] +} diff --git a/qubership-apihub-service/utils/PagingUtils.go b/qubership-apihub-service/utils/PagingUtils.go new file mode 100644 index 0000000..259330b --- /dev/null +++ b/qubership-apihub-service/utils/PagingUtils.go @@ -0,0 +1,37 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +func PaginateList(listSize int, limit int, page int) (int, int) { + // page count starts with 0 + if limit < 0 || page < 0 { + return 0, 0 + } + if limit == 0 { + return 0, listSize + } + startIndex := (page) * limit + endIndex := startIndex + limit + + if startIndex >= listSize { + return 0, 0 // Return invalid indices if start index is out of range + } + + if endIndex > listSize { + endIndex = listSize // Adjust end index to the last index if it exceeds the list size + } + + return startIndex, endIndex +} diff --git a/qubership-apihub-service/utils/PagingUtils_test.go b/qubership-apihub-service/utils/PagingUtils_test.go new file mode 100644 index 0000000..01015b4 --- /dev/null +++ b/qubership-apihub-service/utils/PagingUtils_test.go @@ -0,0 +1,95 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import "testing" + +func TestPaginateList(t *testing.T) { + + startIndex, endIndex := PaginateList(100, 10, 1) + if startIndex != 10 || endIndex != 20 { + t.Errorf("Expected start index: 10, end index: 20; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(100, 10, 3) + if startIndex != 30 || endIndex != 40 { + t.Errorf("Expected start index: 30, end index: 40; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(100, 10, 10) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(0, 10, 1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, 0, 1) + if startIndex != 0 || endIndex != 10 { + t.Errorf("Expected start index: 0, end index: 10; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, 10, 0) + if startIndex != 0 || endIndex != 10 { + t.Errorf("Expected start index: 0, end index: 10; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, -10, 1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, 
10, -1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(-10, 10, 1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(0, -10, -1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(-10, 0, -1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, -10, 1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(10, 10, -1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(-10, 10, 1) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } + + startIndex, endIndex = PaginateList(0, 0, 0) + if startIndex != 0 || endIndex != 0 { + t.Errorf("Expected start index: 0, end index: 0; Got start index: %d, end index: %d", startIndex, endIndex) + } +} diff --git a/qubership-apihub-service/utils/PathUtils.go b/qubership-apihub-service/utils/PathUtils.go new file mode 100644 index 0000000..7d22392 --- /dev/null +++ b/qubership-apihub-service/utils/PathUtils.go @@ -0,0 +1,68 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except 
in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import ( + "path" + "strings" +) + +// Returns normalised fileId +func NormalizeFileId(fileId string) string { + // add prefix '/' since we are always operating from the root folder + filePath, fileName := SplitFileId("/" + fileId) + + normalizedFileId := ConcatToFileId(filePath, fileName) + return strings.TrimPrefix(normalizedFileId, "/") +} + +// Returns normalised file Path +func NormalizeFilePath(filePath string) string { + // add prefix '/' since we are always operating from the root folder + filePath = path.Clean("/" + filePath) + + if filePath == "." || filePath == "/" { + filePath = "" + } + return strings.TrimPrefix(filePath, "/") +} + +// Splits fileId to normalized Path and Name +func SplitFileId(fileId string) (string, string) { + filePath := path.Dir(fileId) + var fileName string + if strings.HasSuffix(fileId, "/") { + fileName = "" + } else { + fileName = path.Base(fileId) + } + + if filePath == "." 
|| filePath == "/" { + filePath = "" + } + + return filePath, fileName +} + +// Concatenates file Path and Name to fileId +func ConcatToFileId(filePath string, fileName string) string { + if filePath == "" { + return fileName + } else if strings.HasSuffix(filePath, "/") { + return filePath + fileName + } else { + return filePath + "/" + fileName + } +} diff --git a/qubership-apihub-service/utils/PerfUtils.go b/qubership-apihub-service/utils/PerfUtils.go new file mode 100644 index 0000000..0aace62 --- /dev/null +++ b/qubership-apihub-service/utils/PerfUtils.go @@ -0,0 +1,25 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import "github.com/sirupsen/logrus" + +func PerfLog(timeMs int64, thresholdMs int64, str string) { + if timeMs > thresholdMs { + logrus.Warnf("PERF: "+str+" took %d ms more than expected (%d ms)", timeMs, thresholdMs) + } else { + logrus.Debugf("PERF: "+str+" took %dms", timeMs) + } +} diff --git a/qubership-apihub-service/utils/UrlUtils.go b/qubership-apihub-service/utils/UrlUtils.go new file mode 100644 index 0000000..afd3ccd --- /dev/null +++ b/qubership-apihub-service/utils/UrlUtils.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utils + +import "net/url" + +func IsUrl(str string) bool { + u, err := url.Parse(str) + return err == nil && u.Scheme != "" && u.Host != "" +} diff --git a/qubership-apihub-service/utils/Validation.go b/qubership-apihub-service/utils/Validation.go new file mode 100644 index 0000000..dd27127 --- /dev/null +++ b/qubership-apihub-service/utils/Validation.go @@ -0,0 +1,146 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package utils + +import ( + "fmt" + "net/http" + "reflect" + "strings" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" + "github.com/go-playground/validator/v10" +) + +var validate *validator.Validate + +func getValidator() *validator.Validate { + if validate == nil { + validate = validator.New() + } + return validate +} + +func ValidateObject(object interface{}) error { + err := getValidator().Struct(object) + if err == nil { + return nil + } + missingParams := make([]string, 0) //todo do not add or remove duplicate fields (e.g. arrays validations) + for _, err := range err.(validator.ValidationErrors) { + if err.Tag() == "required" { + missingParams = append(missingParams, err.StructNamespace()) + } + } + if len(missingParams) == 0 { + return nil + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.RequiredParamsMissing, + Message: exception.RequiredParamsMissingMsg, + Params: map[string]interface{}{"params": strings.Join(getValidatedValuesTags(object, missingParams), ", ")}, + } +} + +func getValidatedValuesTags(b interface{}, targetNames []string) []string { + //TODO: doesn't work for documents + result := make([]string, 0) + for i := 0; i < len(targetNames); i++ { + reflectValue := reflect.ValueOf(b) + //split and remove Highest struct name + splitName := strings.Split(targetNames[i], ".")[1:] + fullErrorName := getTagReflective(reflectValue.Type(), splitName, "") + result = append(result, fullErrorName) + } + + return result +} + +func getTagReflective(value reflect.Type, splitName []string, name string) string { + currentElem, splitName := splitName[0], splitName[1:] + currentElemSplit := strings.Split(currentElem, "[") + arrayIndex := "" + isArr := false + if len(currentElemSplit) > 1 { + arrayIndex = strings.Split(currentElemSplit[1], "]")[0] + isArr = true + } + for i := 0; i < value.NumField(); i++ { + t := value.Field(i) + + if t.Name != currentElemSplit[0] { + continue + } + 
+ if len(splitName) > 0 { + if name == "" { + name = getJsonTag(t, arrayIndex, isArr) + } else { + name = name + "." + getJsonTag(t, arrayIndex, isArr) + } + + var nextValueType reflect.Type + switch os := t.Type.Kind(); os { + case reflect.Struct: + nextValueType = t.Type + + case reflect.Slice: + nextValueType = t.Type.Elem() + + case reflect.Pointer: + switch pointerType := t.Type.Elem().Kind(); pointerType { + case reflect.Struct: + nextValueType = t.Type.Elem() + case reflect.Slice: + nextValueType = t.Type.Elem().Elem() + default: + nextValueType = t.Type.Elem() + } + default: + nextValueType = t.Type + } + return getTagReflective(nextValueType, splitName, name) + } else { + + if name == "" { + return getJsonTag(t, arrayIndex, isArr) + } else { + return name + "." + getJsonTag(t, arrayIndex, isArr) + } + } + } + return "" +} + +func getJsonTag(field reflect.StructField, arrayIndex string, isArr bool) string { + jsonTag := field.Tag.Get("json") + fieldName := "" + switch jsonTag { + case "-", "": + fieldName = field.Name + default: + parts := strings.Split(jsonTag, ",") + fieldName = parts[0] + if fieldName == "" { + fieldName = field.Name + } + } + if isArr { + return fmt.Sprintf("%s[%s]", fieldName, arrayIndex) + } else { + return fieldName + } +} diff --git a/qubership-apihub-service/view/ActivityTracking.go b/qubership-apihub-service/view/ActivityTracking.go new file mode 100644 index 0000000..4f52e9b --- /dev/null +++ b/qubership-apihub-service/view/ActivityTracking.go @@ -0,0 +1,113 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package view

import "time"

// ActivityTrackingEvent is a single activity-log record attached to a package.
type ActivityTrackingEvent struct {
	Type      ATEventType            `json:"eventType,omitempty"`
	Data      map[string]interface{} `json:"params,omitempty"`
	PackageId string                 `json:"packageId,omitempty"`
	Date      time.Time              `json:"date"`
	UserId    string                 `json:"userId,omitempty"`
}

// PkgActivityResponseItem_depracated is the legacy activity item shape.
// NOTE(review): the misspelling "depracated" is kept — it is part of the
// identifier other files may reference.
type PkgActivityResponseItem_depracated struct {
	PackageName string `json:"packageName"`
	PackageKind string `json:"kind"`
	UserName    string `json:"userName"`
	ActivityTrackingEvent
}

// PkgActivityResponse_deprecated wraps a list of legacy activity items.
type PkgActivityResponse_deprecated struct {
	Events []PkgActivityResponseItem_depracated `json:"events"`
}

// PkgActivityResponseItem is the current activity item shape; the acting
// principal is reported as a free-form object instead of a plain user name.
type PkgActivityResponseItem struct {
	PackageName string                 `json:"packageName"`
	PackageKind string                 `json:"kind"`
	Principal   map[string]interface{} `json:"principal,omitempty"`
	ActivityTrackingEvent
}

// PkgActivityResponse wraps a list of activity items.
type PkgActivityResponse struct {
	Events []PkgActivityResponseItem `json:"events"`
}

// ActivityHistoryReq carries the filtering/paging options of an activity
// history query.
type ActivityHistoryReq struct {
	OnlyFavorite bool
	TextFilter   string
	Types        []string
	Limit        int
	Page         int
	OnlyShared   bool
	Kind         []string
}

// ATEventType identifies the kind of an activity tracking event.
type ATEventType string

// Access control events.
const (
	ATETGrantRole  ATEventType = "grant_role"
	ATETUpdateRole ATEventType = "update_role"
	ATETDeleteRole ATEventType = "delete_role"
)

// Apihub API key events.
const (
	ATETGenerateApiKey ATEventType = "generate_api_key"
	ATETRevokeApiKey   ATEventType = "revoke_api_key"
)

// Package lifecycle events.
const (
	ATETPatchPackageMeta ATEventType = "patch_package_meta"
	ATETCreatePackage    ATEventType = "create_package"
	ATETDeletePackage    ATEventType = "delete_package"
)

// Publish/versioning events.
const (
	ATETPublishNewVersion  ATEventType = "publish_new_version"
	ATETPublishNewRevision ATEventType = "publish_new_revision"
	ATETPatchVersionMeta   ATEventType = "patch_version_meta"
	ATETDeleteVersion      ATEventType = "delete_version"
)

// Manual operation-group events.
const (
	ATETCreateManualGroup         ATEventType = "create_manual_group"
	ATETDeleteManualGroup         ATEventType = "delete_manual_group"
	ATETOperationsGroupParameters ATEventType = "update_operations_group_parameters"
)

// ConvertEventTypes expands aggregated filter names (e.g. "package_members")
// into the concrete event type values they cover. Unknown filter names are
// silently skipped, and an empty/unmatched input yields a nil slice — both
// behaviors are preserved from the original implementation.
func ConvertEventTypes(input []string) []string {
	expansions := map[string][]ATEventType{
		"package_members":    {ATETGrantRole, ATETUpdateRole, ATETDeleteRole},
		"package_security":   {ATETGenerateApiKey, ATETRevokeApiKey},
		"new_version":        {ATETPublishNewVersion},
		"package_version":    {ATETPublishNewRevision, ATETPatchVersionMeta, ATETDeleteVersion},
		"package_management": {ATETPatchPackageMeta, ATETCreatePackage, ATETDeletePackage},
		"operations_group":   {ATETCreateManualGroup, ATETDeleteManualGroup, ATETOperationsGroupParameters},
	}
	var output []string
	for _, filterName := range input {
		for _, eventType := range expansions[filterName] {
			output = append(output, string(eventType))
		}
	}
	return output
}

// EventRoleView is the role payload embedded in role-related events.
type EventRoleView struct {
	RoleId string `json:"roleId"`
	Role   string `json:"role"`
}

// ==== qubership-apihub-service/view/Admin.go ====

// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type Admins struct { + Admins []User `json:"admins"` +} + +type AddSysadmReq struct { + UserId string `json:"userId" validate:"required"` +} diff --git a/qubership-apihub-service/view/Agent.go b/qubership-apihub-service/view/Agent.go new file mode 100644 index 0000000..ad1a11a --- /dev/null +++ b/qubership-apihub-service/view/Agent.go @@ -0,0 +1,70 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "strings" + "time" +) + +type AgentKeepaliveMessage struct { + Cloud string `json:"cloud" validate:"required"` + Namespace string `json:"namespace" validate:"required"` + Url string `json:"url" validate:"required"` + BackendVersion string `json:"backendVersion" validate:"required"` + Name string `json:"name"` + AgentVersion string `json:"agentVersion"` +} + +type AgentStatus string + +const AgentStatusActive AgentStatus = "active" +const AgentStatusInactive AgentStatus = "inactive" + +type AgentInstance struct { + AgentId string `json:"agentId"` + AgentDeploymentCloud string `json:"agentDeploymentCloud"` + AgentDeploymentNamespace string `json:"agentDeploymentNamespace"` + AgentUrl string `json:"agentUrl"` + LastActive time.Time `json:"lastActive"` + Status AgentStatus `json:"status"` + BackendVersion string `json:"backendVersion"` + Name string `json:"name"` + AgentVersion string `json:"agentVersion"` + CompatibilityError *AgentCompatibilityError `json:"compatibilityError,omitempty"` +} + +func MakeAgentId(cloud, namespace string) string { + return strings.ToLower(cloud) + "_" + strings.ToLower(namespace) +} + +type AgentNamespaces struct { + Namespaces []string `json:"namespaces"` + CloudName string `json:"cloudName"` +} + +type AgentVersion struct { + Version string `json:"version"` +} + +type AgentCompatibilityError struct { + Severity AgentCompatibilityErrorSeverity `json:"severity"` + Message string `json:"message"` +} + +type AgentCompatibilityErrorSeverity string + +const SeverityError AgentCompatibilityErrorSeverity = "error" +const SeverityWarning AgentCompatibilityErrorSeverity = "warning" diff --git a/qubership-apihub-service/view/ApiConfig.go b/qubership-apihub-service/view/ApiConfig.go new file mode 100644 index 0000000..15dca37 --- /dev/null +++ b/qubership-apihub-service/view/ApiConfig.go @@ -0,0 +1,25 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package view

// ApiConfig describes the API configuration document: where the config
// itself lives plus the list of named service URLs it exposes.
type ApiConfig struct {
	ConfigUrl string `json:"configUrl"`
	Urls      []Url  `json:"urls"`
}

// Url is a single named endpoint entry of ApiConfig.
type Url struct {
	Url  string `json:"url"`
	Name string `json:"name"`
}

// ==== qubership-apihub-service/view/ApiKey.go ====

// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package view

// ApiKeyStatus reports the validity status of a checked API key.
type ApiKeyStatus struct {
	Status string `json:"status"`
}

// ApiKeyRequest carries a raw API key for verification.
// Note the lower-case json name "apikey" (not "apiKey") — presumably matched
// by an external caller; do not change without checking clients.
type ApiKeyRequest struct {
	ApiKey string `json:"apikey"`
}

// ApiKey is the minimal public identification of an API key.
type ApiKey struct {
	Id   string `json:"id"`
	Name string `json:"name"`
}

// ==== qubership-apihub-service/view/ApihubApiKey.go ====

// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package view + +import ( + "time" +) + +type ApihubApiKey_deprecated struct { + Id string `json:"id"` + PackageId string `json:"packageId"` + Name string `json:"name"` + CreatedBy string `json:"createdBy"` + CreatedAt time.Time `json:"createdAt"` + DeletedBy string `json:"deletedBy,omitempty"` + DeletedAt *time.Time `json:"deletedAt,omitempty"` + ApiKey string `json:"apiKey,omitempty"` + Roles []string `json:"roles"` +} + +type ApihubApiKeys_deprecated struct { + ApiKeys []ApihubApiKey_deprecated `json:"apiKeys"` +} + +type ApihubApiKey_v3_deprecated struct { + Id string `json:"id"` + PackageId string `json:"packageId"` + Name string `json:"name"` + CreatedBy User `json:"createdBy"` + CreatedAt time.Time `json:"createdAt"` + DeletedBy string `json:"deletedBy,omitempty"` + DeletedAt *time.Time `json:"deletedAt,omitempty"` + ApiKey string `json:"apiKey,omitempty"` + Roles []string `json:"roles"` +} + +type ApihubApiKeys_v3_deprecated struct { + ApiKeys []ApihubApiKey_v3_deprecated `json:"apiKeys"` +} + +type ApihubApiKey struct { + Id string `json:"id"` + PackageId string `json:"packageId"` + Name string `json:"name"` + CreatedBy User `json:"createdBy"` + CreatedFor *User `json:"createdFor,omitempty"` + CreatedAt time.Time `json:"createdAt"` + DeletedBy string `json:"deletedBy,omitempty"` + DeletedAt *time.Time `json:"deletedAt,omitempty"` + ApiKey string `json:"apiKey,omitempty"` + Roles []string `json:"roles"` +} + +type ApihubApiKeys struct { + ApiKeys []ApihubApiKey `json:"apiKeys"` +} + +type ApihubApiKeyCreateReq_deprecated struct { + Name string `json:"name" validate:"required"` + Roles []string `json:"roles"` +} + +type ApihubApiKeyCreateReq struct { + Name string `json:"name" validate:"required"` + CreatedFor string `json:"createdFor"` + Roles []string `json:"roles"` +} + +type ApihubApiKeyExtAuthView struct { + Id string `json:"id"` + PackageId string `json:"packageId"` + Name string `json:"name"` + Revoked bool `json:"revoked"` + Roles []string 
`json:"roles"` +} diff --git a/qubership-apihub-service/view/Audience.go b/qubership-apihub-service/view/Audience.go new file mode 100644 index 0000000..93adde2 --- /dev/null +++ b/qubership-apihub-service/view/Audience.go @@ -0,0 +1,27 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +const ApiAudienceInternal = "internal" +const ApiAudienceExternal = "external" +const ApiAudienceUnknown = "unknown" + +func ValidApiAudience(apiAudience string) bool { + switch apiAudience { + case ApiAudienceInternal, ApiAudienceExternal, ApiAudienceUnknown: + return true + } + return false +} diff --git a/qubership-apihub-service/view/Branch.go b/qubership-apihub-service/view/Branch.go new file mode 100644 index 0000000..178143c --- /dev/null +++ b/qubership-apihub-service/view/Branch.go @@ -0,0 +1,89 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type Branch struct { + ProjectId string `json:"projectId" msgpack:"projectId"` + Editors []User `json:"editors" msgpack:"editors"` + ConfigFileId string `json:"configFileId,omitempty" msgpack:"configFileId,omitempty"` + ChangeType ChangeType `json:"changeType,omitempty" msgpack:"changeType,omitempty"` + Permissions *[]string `json:"permissions,omitempty" msgpack:"permissions,omitempty"` //show only when permissions are calculated + Files []Content `json:"files" validate:"dive,required" msgpack:"files"` + Refs []Ref `json:"refs" msgpack:"refs"` +} + +type BranchGitConfigView struct { + ProjectId string `json:"projectId"` + Files []ContentGitConfigView `json:"files"` + Refs []RefGitConfigView `json:"refs"` +} + +type Branches struct { + Branches []Branch `json:"branches"` +} + +func (b *Branch) RemoveFolders() { + onlyFiles := make([]Content, 0) + for _, content := range b.Files { + if !content.IsFolder { + onlyFiles = append(onlyFiles, content) + } + } + b.Files = onlyFiles +} + +func TransformBranchToGitView(branch Branch) *BranchGitConfigView { + resFiles := make([]ContentGitConfigView, 0) + resRefs := make([]RefGitConfigView, 0) + + for _, f := range branch.Files { + if f.FromFolder { + continue + } + if f.IsFolder { + f.Publish = false + f.Labels = []string{} + } + resFiles = append(resFiles, TransformContentToGitView(f)) + } + + for _, r := range branch.Refs { + resRefs = append(resRefs, TransformRefToGitView(r)) + } + + return &BranchGitConfigView{ + ProjectId: branch.ProjectId, + Files: resFiles, + Refs: resRefs, + } +} + +func TransformGitToBranchView(branch *BranchGitConfigView, refs []Ref) *Branch { + + resContent := make([]Content, 0) + for _, file := range branch.Files { + resContent = append(resContent, TransformGitViewToContent(file)) + } + + if refs == nil { + refs = make([]Ref, 0) + } + + return &Branch{ + ProjectId: 
branch.ProjectId, + Files: resContent, + Refs: refs, + } +} diff --git a/qubership-apihub-service/view/BranchChanges.go b/qubership-apihub-service/view/BranchChanges.go new file mode 100644 index 0000000..2a83a42 --- /dev/null +++ b/qubership-apihub-service/view/BranchChanges.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ChangeType string + +const CTAdded ChangeType = "added" +const CTUpdated ChangeType = "updated" +const CTDeleted ChangeType = "deleted" +const CTUnchanged ChangeType = "none" diff --git a/qubership-apihub-service/view/BranchConflicts.go b/qubership-apihub-service/view/BranchConflicts.go new file mode 100644 index 0000000..a5ffe51 --- /dev/null +++ b/qubership-apihub-service/view/BranchConflicts.go @@ -0,0 +1,25 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type BranchConflicts struct { + Files []string `json:"files"` +} + +type FileConflict struct { + FileId string + ConflictedBlobId string + ConflictedFileId *string +} diff --git a/qubership-apihub-service/view/Build.go b/qubership-apihub-service/view/Build.go new file mode 100644 index 0000000..00273a9 --- /dev/null +++ b/qubership-apihub-service/view/Build.go @@ -0,0 +1,240 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +type BuildConfig struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + BuildType string `json:"buildType"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Status string `json:"status"` + Refs []BCRef `json:"refs,omitempty"` + Files []BCFile `json:"files,omitempty"` + PublishId string `json:"publishId"` + Metadata BuildConfigMetadata `json:"metadata,omitempty"` + CreatedBy string `json:"createdBy"` + NoChangelog bool `json:"noChangeLog,omitempty"` // for migration + PublishedAt time.Time `json:"publishedAt,omitempty"` // for migration + MigrationBuild bool `json:"migrationBuild,omitempty"` //for migration + MigrationId string `json:"migrationId,omitempty"` //for migration + ComparisonRevision int `json:"comparisonRevision,omitempty"` + ComparisonPrevRevision int `json:"comparisonPrevRevision,omitempty"` + UnresolvedRefs bool `json:"unresolvedRefs,omitempty"` + ResolveRefs bool `json:"resolveRefs,omitempty"` + ResolveConflicts bool `json:"resolveConflicts,omitempty"` + ServiceName string `json:"serviceName,omitempty"` + ApiType string `json:"apiType,omitempty"` //for operation group + GroupName string `json:"groupName,omitempty"` //for operation group + Format string `json:"format,omitempty"` //for operation group + ExternalMetadata map[string]interface{} `json:"externalMetadata,omitempty"` +} + +type BuildConfigMetadata struct { + BranchName string `json:"branchName,omitempty"` + RepositoryUrl string `json:"repositoryUrl,omitempty"` + CloudName string `json:"cloudName,omitempty"` + CloudUrl string `json:"cloudUrl,omitempty"` + Namespace string `json:"namespace,omitempty"` + VersionLabels []string `json:"versionLabels,omitempty"` +} + +type BCRef struct { + RefId string `json:"refId"` + Version string 
`json:"version"` //format: version@revision + ParentRefId string `json:"parentRefId"` + ParentVersion string `json:"parentVersion"` //format: version@revision + Excluded bool `json:"excluded,omitempty"` +} + +type BCFile struct { + FileId string `json:"fileId"` + Slug string `json:"slug"` //for migration + Index int `json:"index"` //for migration + Publish *bool `json:"publish"` + Labels []string `json:"labels"` + BlobId string `json:"blobId,omitempty"` + XApiKind string `json:"xApiKind,omitempty"` +} + +type BuildStatusEnum string + +const StatusNotStarted BuildStatusEnum = "none" +const StatusRunning BuildStatusEnum = "running" +const StatusComplete BuildStatusEnum = "complete" +const StatusError BuildStatusEnum = "error" + +// todo string -> BuildType type +const ChangelogType string = "changelog" +const BuildType string = "build" +const DocumentGroupType_deprecated string = "documentGroup" +const ReducedSourceSpecificationsType string = "reducedSourceSpecifications" +const MergedSpecificationType string = "mergedSpecification" + +func ValidateGroupBuildType(buildType string) error { + switch buildType { + case ReducedSourceSpecificationsType, MergedSpecificationType: + return nil + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "buildType", "value": buildType}, + } +} + +func BuildStatusFromString(str string) (BuildStatusEnum, error) { + switch str { + case "none": + return StatusNotStarted, nil + case "running": + return StatusRunning, nil + case "complete": + return StatusComplete, nil + case "error": + return StatusError, nil + } + return StatusNotStarted, fmt.Errorf("unknown build status: %s", str) +} + +func BuildConfigToMap(bc BuildConfig) (*map[string]interface{}, error) { + var confAsMap map[string]interface{} + cBytes, err := json.Marshal(bc) + if err != nil { + return nil, err + } + err = 
json.Unmarshal(cBytes, &confAsMap) + if err != nil { + return nil, err + } + return &confAsMap, nil +} + +func BuildConfigFromMap(confAsMap map[string]interface{}, publishId string) (*BuildConfig, error) { + var bc BuildConfig + cBytes, err := json.Marshal(confAsMap) + if err != nil { + return nil, err + } + err = json.Unmarshal(cBytes, &bc) + if err != nil { + return nil, err + } + bc.PublishId = publishId + return &bc, nil +} + +func BcRefsToRefs(refs []BCRef) []Ref { + var result []Ref + for _, ref := range refs { + result = append(result, BCRefToRef(ref)) + } + return result +} + +func BCRefToRef(r BCRef) Ref { + return Ref{ + RefPackageId: r.RefId, + RefPackageName: "", + RefPackageVersion: r.Version, + Status: "", + VersionStatus: "", + Kind: "", + } +} + +type PublishStatusResponse struct { + PublishId string `json:"publishId"` + Status string `json:"status"` + Message string `json:"message"` +} + +func IsBuildFinished(status BuildStatusEnum) bool { + if status == StatusComplete || status == StatusError { + return true + } + return false +} + +type BuildsStatusRequest struct { + PublishIds []string `json:"publishIds"` +} + +type ChangelogBuildSearchRequest struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + PreviousVersion string `json:"previousVersion"` + BuildType string `json:"buildType"` + ComparisonRevision int `json:"comparisonRevision"` + ComparisonPrevRevision int `json:"comparisonPrevRevision"` +} + +type DocumentGroupBuildSearchRequest struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + BuildType string `json:"buildType"` + Format string `json:"format"` + ApiType string `json:"apiType"` + GroupName string `json:"groupName"` +} + +type BuildView struct { + BuildId string `json:"buildId,omitempty"` + Status string `json:"status"` + Details string `json:"details"` + PackageId string `json:"packageId"` + Version string 
`json:"version"` + CreatedAt time.Time `json:"createdAt"` + LastActive time.Time `json:"lastActive"` + CreatedBy string `json:"createdBy,omitempty"` + RestartCount int `json:"restart_count"` +} + +type PublishedVersionSourceDataConfig struct { + Sources []byte `json:"sources"` + Config BuildConfig `json:"config"` +} + +type ChangelogBuildConfigView struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + BuildType string `json:"buildType"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + CreatedBy string `json:"createdBy"` + BuildId string `json:"buildId"` +} + +type DocumentTransformConfigView struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + BuildType string `json:"buildType"` + Format string `json:"format,omitempty"` + ApiType string `json:"apiType"` + GroupName string `json:"groupName"` + CreatedBy string `json:"createdBy"` + BuildId string `json:"buildId"` +} diff --git a/qubership-apihub-service/view/BusinessMetric.go b/qubership-apihub-service/view/BusinessMetric.go new file mode 100644 index 0000000..a8c49ac --- /dev/null +++ b/qubership-apihub-service/view/BusinessMetric.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package view

// BusinessMetric is one aggregated usage data point: the value of a named
// metric for a package/user on a given date.
// Note the inconsistent json tag casing ("date"/"packageId" vs "username") —
// presumably matched by existing consumers; do not change without checking.
type BusinessMetric struct {
	Date      string `json:"date"`
	PackageId string `json:"packageId"`
	Metric    string `json:"metric"`
	Username  string `json:"username"`
	Value     int    `json:"value"`
}

// ==== qubership-apihub-service/view/CommonOperation.go ====

// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package view + +import ( + "fmt" + + "github.com/iancoleman/orderedmap" +) + +type Operation struct { + OperationId string `json:"operationId" validate:"required"` + Title string `json:"title" validate:"required"` + ApiType string `json:"apiType" validate:"required"` + DataHash string `json:"dataHash" validate:"required"` + Deprecated bool `json:"deprecated"` + ApiKind string `json:"apiKind" validate:"required"` + Metadata map[string]interface{} `json:"metadata" validate:"required"` + SearchScopes map[string]interface{} `json:"searchScopes" validate:"required"` + PreviousReleaseVersions []string `json:"deprecatedInPreviousVersions"` + DeprecatedInfo string `json:"deprecatedInfo"` + DeprecatedItems []DeprecatedItem `json:"deprecatedItems"` + Tags []string `json:"tags"` + Models map[string]string `json:"models"` + ApiAudience string `json:"apiAudience" validate:"required"` +} + +type DocumentsOperation_deprecated struct { + OperationId string `json:"operationId" validate:"required"` + Title string `json:"title" validate:"required"` + ApiType string `json:"apiType" validate:"required"` + DataHash string `json:"dataHash" validate:"required"` + Deprecated bool `json:"deprecated"` + ApiKind string `json:"apiKind" validate:"required"` + Metadata interface{} `json:"metadata" validate:"required"` +} + +type SingleOperationView struct { + Data *orderedmap.OrderedMap `json:"data,omitempty"` + OperationId string `json:"operationId"` + Title string `json:"title"` + DataHash string `json:"dataHash"` + Deprecated bool `json:"deprecated,omitempty"` + ApiKind string `json:"apiKind"` + ApiType string `json:"apiType"` + CustomTags map[string]interface{} `json:"customTags,omitempty"` + ApiAudience string `json:"apiAudience"` +} + +type CommonOperationView struct { + OperationId string `json:"operationId"` + Title string `json:"title"` + DataHash string `json:"dataHash"` + Deprecated bool `json:"deprecated,omitempty"` + ApiKind string `json:"apiKind"` + ApiType string 
`json:"apiType"` + CustomTags map[string]interface{} `json:"customTags,omitempty"` + ApiAudience string `json:"apiAudience"` +} + +type OperationListView struct { + CommonOperationView + PackageRef string `json:"packageRef,omitempty"` + Data *orderedmap.OrderedMap `json:"data,omitempty"` +} + +type DeprecatedOperationView struct { + PackageRef string `json:"packageRef,omitempty"` + OperationId string `json:"operationId"` + Title string `json:"title"` + DataHash string `json:"dataHash"` + Deprecated bool `json:"deprecated,omitempty"` + ApiKind string `json:"apiKind"` + ApiType string `json:"apiType"` + PreviousReleaseVersions []string `json:"deprecatedInPreviousVersions,omitempty"` + DeprecatedCount int `json:"deprecatedCount"` + DeprecatedInfo string `json:"deprecatedInfo,omitempty"` + DeprecatedItems []DeprecatedItem `json:"deprecatedItems,omitempty"` + ApiAudience string `json:"apiAudience"` +} +type DeprecatedItem struct { + PreviousReleaseVersions []string `json:"deprecatedInPreviousVersions,omitempty"` + DeclarationJsonPaths [][]interface{} `json:"declarationJsonPaths,omitempty"` + Description string `json:"description,omitempty"` + Hash string `json:"hash,omitempty"` + TolerantHash string `json:"tolerantHash,omitempty"` + DeprecatedInfo string `json:"deprecatedInfo,omitempty"` +} + +type DeprecatedItems struct { + DeprecatedItems []DeprecatedItem `json:"deprecatedItems"` +} + +type OperationComparison struct { + OperationId string `json:"operationId" validate:"required"` + DataHash string `json:"dataHash,omitempty"` + PreviousDataHash string `json:"previousDataHash,omitempty"` + ChangeSummary ChangeSummary `json:"changeSummary,omitempty"` + Changes []interface{} `json:"changes" validate:"required,dive,required"` + JsonPath []string `json:"jsonPath,omitempty"` + Action string `json:"action,omitempty"` + Severity string `json:"severity,omitempty"` + Metadata map[string]interface{} `json:"metadata"` +} + +type SingleOperationChangeAdd struct { + 
SingleOperationChangeCommon + CurrentDeclarationJsonPaths [][]interface{} `json:"currentDeclarationJsonPaths,omitempty"` + CurrentValueHash string `json:"currentValueHash,omitempty"` +} + +type SingleOperationChangeRemove struct { + SingleOperationChangeCommon + PreviousDeclarationJsonPaths [][]interface{} `json:"previousDeclarationJsonPaths,omitempty"` + PreviousValueHash string `json:"previousValueHash,omitempty"` +} + +type SingleOperationChangeReplace struct { + SingleOperationChangeCommon + CurrentDeclarationJsonPaths [][]interface{} `json:"currentDeclarationJsonPaths,omitempty"` + CurrentValueHash string `json:"currentValueHash,omitempty"` + PreviousDeclarationJsonPaths [][]interface{} `json:"previousDeclarationJsonPaths,omitempty"` + PreviousValueHash string `json:"previousValueHash,omitempty"` +} + +type SingleOperationChangeRename struct { + SingleOperationChangeCommon + CurrentDeclarationJsonPaths [][]interface{} `json:"currentDeclarationJsonPaths,omitempty"` + CurrentKey string `json:"currentKey,omitempty"` + PreviousDeclarationJsonPaths [][]interface{} `json:"previousDeclarationJsonPaths,omitempty"` + PreviousKey string `json:"previousKey,omitempty"` +} + +type SingleOperationChangeCommon struct { + Action string `json:"action,omitempty"` + Severity string `json:"severity,omitempty"` + Description string `json:"description,omitempty"` + Scope string `json:"scope,omitempty"` +} + +func GetSingleOperationChangeCommon(change interface{}) SingleOperationChangeCommon { + if change != nil { + if val, ok := change.(SingleOperationChangeAdd); ok { + return val.SingleOperationChangeCommon + } + if val, ok := change.(SingleOperationChangeRemove); ok { + return val.SingleOperationChangeCommon + } + if val, ok := change.(SingleOperationChangeReplace); ok { + return val.SingleOperationChangeCommon + } + if val, ok := change.(SingleOperationChangeRename); ok { + return val.SingleOperationChangeCommon + } + if val, ok := change.(SingleOperationChangeCommon); ok { + 
			// Exact (non-pointer) match on one of the concrete change types:
			// return its embedded common part.
			return val
		}
	}
	// nil input or an unrecognized concrete type: fall back to an empty common view.
	return SingleOperationChangeCommon{}
}

// ParseSingleOperationChange converts a generic change value (as decoded from
// JSON into a map[string]interface{}) into one of the typed change views —
// SingleOperationChangeAdd / Remove / Replace / Rename — selected by the
// "action" field. Inputs that are nil, not a map, or missing a string
// "action" are returned as a bare SingleOperationChangeCommon carrying
// whatever common fields could be read.
func ParseSingleOperationChange(change interface{}) interface{} {
	if change == nil {
		return SingleOperationChangeCommon{}
	}
	// Accumulators for the action-specific fields; only the ones relevant to
	// the detected action end up in the returned struct.
	var currentDeclarationJsonPaths [][]interface{}
	var previousDeclarationJsonPaths [][]interface{}
	var currentValueHash string
	var previousValueHash string
	var currentKey string
	var previousKey string
	singleOperationChangeCommon := SingleOperationChangeCommon{}
	if change, ok := change.(map[string]interface{}); ok { // shadows the parameter with the typed map
		// Common fields: each is optional and silently skipped when absent
		// or not a string.
		if val, ok := change["description"].(string); ok {
			singleOperationChangeCommon.Description = val
		}
		if val, ok := change["severity"].(string); ok {
			singleOperationChangeCommon.Severity = val
		}
		if val, ok := change["scope"].(string); ok {
			singleOperationChangeCommon.Scope = val
		}
		// "action" is mandatory for a typed result; without it only the
		// (possibly partially filled) common part is returned.
		if singleOperationChangeCommon.Action, ok = change["action"].(string); !ok {
			return singleOperationChangeCommon
		}
		// "currentDeclarationJsonPaths" is a list of JSON paths, each path
		// itself a list of segments; non-list or empty paths are dropped.
		if currentJsonPathArr, ok := change["currentDeclarationJsonPaths"].([]interface{}); ok {
			for _, currentJsonPath := range currentJsonPathArr {
				objPathArr := make([]interface{}, 0)
				if pathArr, ok := currentJsonPath.([]interface{}); ok {
					objPathArr = append(objPathArr, pathArr...)
				}
				if len(objPathArr) > 0 {
					currentDeclarationJsonPaths = append(currentDeclarationJsonPaths, objPathArr)
				}
			}
		}
		// Same shape handling for the "previous" side of the comparison.
		if previousJsonPathArr, ok := change["previousDeclarationJsonPaths"].([]interface{}); ok {
			for _, previousJsonPath := range previousJsonPathArr {
				objPathArr := make([]interface{}, 0)
				if pathArr, ok := previousJsonPath.([]interface{}); ok {
					objPathArr = append(objPathArr, pathArr...)
+ } + if len(objPathArr) > 0 { + previousDeclarationJsonPaths = append(previousDeclarationJsonPaths, objPathArr) + } + } + } + if val, ok := change["currentValueHash"].(string); ok { + currentValueHash = val + } + if val, ok := change["previousValueHash"].(string); ok { + previousValueHash = val + } + if val, ok := change["currentKey"].(string); ok { + currentKey = val + } + if val, ok := change["previousKey"].(string); ok { + previousKey = val + } + + switch singleOperationChangeCommon.Action { + case "add": + return SingleOperationChangeAdd{ + SingleOperationChangeCommon: singleOperationChangeCommon, + CurrentDeclarationJsonPaths: currentDeclarationJsonPaths, + CurrentValueHash: currentValueHash, + } + case "remove": + return SingleOperationChangeRemove{ + SingleOperationChangeCommon: singleOperationChangeCommon, + PreviousDeclarationJsonPaths: previousDeclarationJsonPaths, + PreviousValueHash: previousValueHash, + } + case "replace": + return SingleOperationChangeReplace{ + SingleOperationChangeCommon: singleOperationChangeCommon, + CurrentDeclarationJsonPaths: currentDeclarationJsonPaths, + CurrentValueHash: currentValueHash, + PreviousDeclarationJsonPaths: previousDeclarationJsonPaths, + PreviousValueHash: previousValueHash, + } + case "rename": + return SingleOperationChangeRename{ + SingleOperationChangeCommon: singleOperationChangeCommon, + CurrentDeclarationJsonPaths: currentDeclarationJsonPaths, + CurrentKey: currentKey, + PreviousDeclarationJsonPaths: previousDeclarationJsonPaths, + PreviousKey: previousKey, + } + } + } + return singleOperationChangeCommon +} + +type VersionChangesView struct { + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Operations []interface{} `json:"operations"` + Packages map[string]PackageVersionRef `json:"packages,omitempty"` +} + +type OperationComparisonChangelogView_deprecated struct { + OperationId string `json:"operationId"` + Title string 
`json:"title"` + ApiKind string `json:"apiKind,omitempty"` + DataHash string `json:"dataHash,omitempty"` + PreviousDataHash string `json:"previousDataHash,omitempty"` + ChangeSummary ChangeSummary `json:"changeSummary"` + PackageRef string `json:"packageRef"` + PreviousVersionPackageRef string `json:"previousVersionPackageRef"` +} + +type ComparisonOperationView struct { + Title string `json:"title"` + ApiKind string `json:"apiKind,omitempty"` + ApiAudience string `json:"apiAudience"` + DataHash string `json:"dataHash,omitempty"` + PackageRef string `json:"packageRef"` +} + +type OperationComparisonChangelogView struct { + OperationId string `json:"operationId"` + CurrentOperation *ComparisonOperationView `json:"currentOperation,omitempty"` + PreviousOperation *ComparisonOperationView `json:"previousOperation,omitempty"` + ChangeSummary ChangeSummary `json:"changeSummary"` +} + +type OperationComparisonChangesView struct { + OperationId string `json:"operationId"` + Title string `json:"title"` + ApiKind string `json:"apiKind,omitempty"` + DataHash string `json:"dataHash,omitempty"` + PreviousDataHash string `json:"previousDataHash,omitempty"` + ChangeSummary ChangeSummary `json:"changeSummary"` + PackageRef string `json:"packageRef"` + PreviousVersionPackageRef string `json:"previousVersionPackageRef"` + Changes []interface{} `json:"changes"` + Action string `json:"action"` +} + +type OperationChangesView struct { + Changes []interface{} `json:"changes"` +} + +type OperationTags struct { + Tags []string `json:"tags"` +} + +type Operations struct { + Operations []interface{} `json:"operations"` + Packages map[string]PackageVersionRef `json:"packages,omitempty"` +} + +type GroupedOperations struct { + Operations []interface{} `json:"operations"` + Packages map[string]PackageVersionRef `json:"packages,omitempty"` +} + +type GroupedGhostOperations_deprecated struct { + GhostOperations []interface{} `json:"ghostOperations"` + Packages map[string]PackageVersionRef 
	`json:"packages,omitempty"` // tag for the Packages field declared on the preceding line
}

// ChangeSummary counts version-comparison changes per severity class.
type ChangeSummary struct {
	Breaking     int `json:"breaking"`
	SemiBreaking int `json:"semi-breaking"`
	Deprecated   int `json:"deprecated"`
	NonBreaking  int `json:"non-breaking"`
	Annotation   int `json:"annotation"`
	Unclassified int `json:"unclassified"`
}

// GetTotalSummary returns the total number of changes across all severity classes.
func (c ChangeSummary) GetTotalSummary() int {
	return c.Breaking + c.SemiBreaking + c.Deprecated + c.NonBreaking + c.Annotation + c.Unclassified
}

// Changelog actions describing what happened to an operation between two versions.
const ChangelogActionChange string = "change"
const ChangelogActionAdd string = "add"
const ChangelogActionRemove string = "remove"

// ApiKind classifies an operation's backward-compatibility contract.
type ApiKind string

const BwcApiKind ApiKind = "bwc"
const NoBwcApiKind ApiKind = "no-bwc"
const DebugApiKind ApiKind = "debug"
const ExperimentalApiKind ApiKind = "experimental"

// Severity is the classification of a single change.
type Severity string

const Annotation Severity = "annotation"
const Breaking Severity = "breaking"
const SemiBreaking Severity = "semi-breaking"
const Deprecated Severity = "deprecated"
const NonBreaking Severity = "non-breaking"
const Unclassified Severity = "unclassified"

// ValidSeverity reports whether s is one of the known severity values.
func ValidSeverity(s string) bool {
	switch s {
	case string(Annotation), string(Breaking), string(SemiBreaking), string(Deprecated), string(NonBreaking), string(Unclassified):
		return true
	}
	return false
}

// ParseApiKind converts s to an ApiKind, returning an error for unknown values.
func ParseApiKind(s string) (ApiKind, error) {
	switch s {
	case string(BwcApiKind):
		return BwcApiKind, nil
	case string(NoBwcApiKind):
		return NoBwcApiKind, nil
	case string(DebugApiKind):
		return DebugApiKind, nil
	case string(ExperimentalApiKind):
		return ExperimentalApiKind, nil
	default:
		return "", fmt.Errorf("unknown API Kind: %v", s)
	}
}

// ApiType is the specification family of an operation (REST, GraphQL, protobuf).
type ApiType string

const RestApiType ApiType = "rest"
const GraphqlApiType ApiType = "graphql"
const ProtobufApiType ApiType = "protobuf"

// ParseApiType converts s to an ApiType, returning an error for unknown values.
func ParseApiType(s string) (ApiType, error) {
	switch s {
	case string(RestApiType):
		return RestApiType, nil
	case string(GraphqlApiType):
		return GraphqlApiType, nil
	case string(ProtobufApiType):
+ return ProtobufApiType, nil + default: + return "", fmt.Errorf("unknown API Type: %v", s) + } +} + +func GetDocumentTypesForApiType(apiType string) []string { + switch apiType { + case string(RestApiType): + return []string{OpenAPI20Type, OpenAPI30Type, OpenAPI31Type} + case string(GraphqlApiType): + return []string{GraphQLSchemaType, GraphAPIType, IntrospectionType} + case string(ProtobufApiType): + return []string{Protobuf3Type} + default: + return []string{} + } +} + +type OperationListReq struct { + Deprecated *bool + HashList []string + Ids []string + IncludeData bool + Kind string + EmptyTag bool + Tag string + Limit int + Page int + TextFilter string + ApiType string + DocumentSlug string + EmptyGroup bool + Group string + OnlyAddable bool + RefPackageId string + CustomTagKey string + CustomTagValue string + ApiAudience string +} + +type DeprecatedOperationListReq struct { + Ids []string + Kind string + Tags []string + Limit int + Page int + TextFilter string + ApiType string + DocumentSlug string + IncludeDeprecatedItems bool + RefPackageId string + EmptyTag bool + EmptyGroup bool + Group string + ApiAudience string +} + +type OperationBasicSearchReq struct { + PackageId string + Version string + OperationId string + Revision int + ApiType string + ApiKind string + Limit int + Offset int + TextFilter string + ApiAudience string +} + +type VersionChangesReq struct { + PreviousVersion string + PreviousVersionPackageId string + DocumentSlug string + ApiKind string + EmptyTag bool + RefPackageId string + Tags []string + TextFilter string + Limit int + Offset int + EmptyGroup bool + Group string + Severities []string + ApiAudience string +} + +type PagingFilterReq struct { + Limit int + Offset int + TextFilter string +} + +type DocumentsFilterReq struct { + ApiType string + Limit int + Offset int + TextFilter string +} + +type DocumentsForTransformationFilterReq struct { + ApiType string + Limit int + Offset int + FilterByOperationGroup string +} +type 
ChangelogCalculationParams struct { + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + ReCalculate bool `json:"reCalculate"` +} + +type CalculationProcessStatus struct { + Status string `json:"status,omitempty"` + Message string `json:"message"` +} + +type DeprecatedOperationsSummary struct { + OperationTypes *[]DeprecatedOperationType `json:"operationTypes,omitempty"` + Refs *[]DeprecatedOperationTypesRef `json:"refs,omitempty"` + Packages *map[string]PackageVersionRef `json:"packages,omitempty"` +} +type DeprecatedOperationType struct { + ApiType string `json:"apiType"` + DeprecatedCount int `json:"deprecatedCount"` + Tags []string `json:"tags"` +} +type DeprecatedOperationTypesRef struct { + PackageRef string `json:"packageRef,omitempty"` + OperationTypes []DeprecatedOperationType `json:"operationTypes"` +} + +type OperationModelUsages struct { + ModelUsages []OperationModels `json:"modelUsages"` +} + +type OperationModels struct { + OperationId string `json:"operationId"` + ModelNames []string `json:"modelNames"` +} + +type CustomTags struct { + CustomTag map[string]interface{} +} diff --git a/qubership-apihub-service/view/Comparison.go b/qubership-apihub-service/view/Comparison.go new file mode 100644 index 0000000..e4e3660 --- /dev/null +++ b/qubership-apihub-service/view/Comparison.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type VersionComparisonSummary struct { + OperationTypes *[]OperationType `json:"operationTypes,omitempty"` + Refs *[]RefComparison `json:"refs,omitempty"` + Packages *map[string]PackageVersionRef `json:"packages,omitempty"` + NoContent bool `json:"noContent,omitempty"` +} + +type RefComparison struct { + PackageRef string `json:"packageRef,omitempty"` + PreviousPackageRef string `json:"previousPackageRef,omitempty"` + OperationTypes []OperationType `json:"operationTypes"` + NoContent bool `json:"noContent,omitempty"` +} diff --git a/qubership-apihub-service/view/Content.go b/qubership-apihub-service/view/Content.go new file mode 100644 index 0000000..6135cd8 --- /dev/null +++ b/qubership-apihub-service/view/Content.go @@ -0,0 +1,99 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" +) + +type Content struct { + FileId string `json:"fileId" validate:"required" msgpack:"fileId"` + Name string `json:"name" msgpack:"name"` + Type ShortcutType `json:"type" msgpack:"type"` + Path string `json:"path" msgpack:"path"` + Publish bool `json:"publish" msgpack:"publish"` + Status FileStatus `json:"status" msgpack:"status"` + LastStatus FileStatus `json:"lastStatus,omitempty" msgpack:"lastStatus,omitempty"` + MovedFrom string `json:"movedFrom,omitempty" msgpack:"movedFrom,omitempty"` + BlobId string `json:"blobId,omitempty" msgpack:"blobId,omitempty"` + ConflictedBlobId string `json:"conflictedBlobId,omitempty" msgpack:"conflictedBlobId,omitempty"` + ConflictedFileId string `json:"conflictedFileId,omitempty" msgpack:"conflictedFileId,omitempty"` + Labels []string `json:"labels,omitempty" msgpack:"labels,omitempty"` + Title string `json:"title,omitempty" msgpack:"title,omitempty"` + ChangeType ChangeType `json:"changeType,omitempty" msgpack:"changeType,omitempty"` + Included bool `json:"-"` //true if file was imported from git + FromFolder bool `json:"-"` + IsFolder bool `json:"-"` +} + +type ContentGitConfigView struct { + FileId string `json:"fileId"` // git file path + Publish *bool `json:"publish,omitempty"` //pointer because absence of flag != false + Labels []string `json:"labels,omitempty"` +} + +func TransformContentToGitView(content Content) ContentGitConfigView { + return ContentGitConfigView{ + FileId: content.FileId, + Publish: &content.Publish, + Labels: content.Labels, + } +} + +func TransformGitViewToContent(content ContentGitConfigView) Content { + publish := true + if content.Publish != nil { + publish = *content.Publish + } + labels := make([]string, 0) + if content.Labels != nil { + labels = content.Labels + } + fileId := utils.NormalizeFileId(content.FileId) + filePath, fileName := utils.SplitFileId(fileId) + return Content{ + 
FileId: fileId, + Name: fileName, + Type: Unknown, + Path: filePath, + Publish: publish, + Status: StatusUnmodified, + Labels: labels, + } +} + +type ContentAddResponse struct { + FileIds []string `json:"fileIds"` +} + +func (c *Content) EqualsGitView(c2 *Content) bool { + return c.FileId == c2.FileId && c.Publish == c2.Publish && equalStringSets(c.Labels, c2.Labels) +} + +func equalStringSets(first []string, second []string) bool { + if len(first) != len(second) { + return false + } + exists := make(map[string]bool) + for _, value := range first { + exists[value] = true + } + for _, value := range second { + if !exists[value] { + return false + } + } + return true +} diff --git a/qubership-apihub-service/view/ContentChange.go b/qubership-apihub-service/view/ContentChange.go new file mode 100644 index 0000000..b052902 --- /dev/null +++ b/qubership-apihub-service/view/ContentChange.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type ContentChange struct { + Before *PublishedContentChange `json:"before"` + After *PublishedContentChange `json:"after"` + Action string `json:"action"` + Changes *ContentChanges `json:"changes,omitempty"` +} diff --git a/qubership-apihub-service/view/ContentChangeDetails.go b/qubership-apihub-service/view/ContentChangeDetails.go new file mode 100644 index 0000000..758edb2 --- /dev/null +++ b/qubership-apihub-service/view/ContentChangeDetails.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ContentChanges struct { + Breaking *int `json:"breaking,omitempty"` + NonBreaking *int `json:"non-breaking,omitempty"` + Annotation *int `json:"annotation,omitempty"` + Unclassified *int `json:"unclassified,omitempty"` +} diff --git a/qubership-apihub-service/view/ContentData.go b/qubership-apihub-service/view/ContentData.go new file mode 100644 index 0000000..971e916 --- /dev/null +++ b/qubership-apihub-service/view/ContentData.go @@ -0,0 +1,22 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ContentData struct { + FileId string + Data []byte + DataType string + BlobId string +} diff --git a/qubership-apihub-service/view/ContentMetaPatch.go b/qubership-apihub-service/view/ContentMetaPatch.go new file mode 100644 index 0000000..a10ece5 --- /dev/null +++ b/qubership-apihub-service/view/ContentMetaPatch.go @@ -0,0 +1,20 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ContentMetaPatch struct { + Publish *bool `json:"publish,omitempty"` + Labels *[]string `json:"labels,omitempty"` +} diff --git a/qubership-apihub-service/view/DbCredentials.go b/qubership-apihub-service/view/DbCredentials.go new file mode 100644 index 0000000..a192efb --- /dev/null +++ b/qubership-apihub-service/view/DbCredentials.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type DbCredentials struct { + Host string + Port int + Database string + Username string + Password string + SSLMode string +} diff --git a/qubership-apihub-service/view/Document.go b/qubership-apihub-service/view/Document.go new file mode 100644 index 0000000..fc94036 --- /dev/null +++ b/qubership-apihub-service/view/Document.go @@ -0,0 +1,118 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +type DocumentTransformationReq struct { + PackageId string `json:"packageId" validate:"required"` + Version string `json:"version" validate:"required"` + ApiType string `json:"apiType" validate:"required"` + GroupName string `json:"groupName" validate:"required"` +} + +type TransformedDocumentsFormat string + +const JsonDocumentFormat TransformedDocumentsFormat = "json" +const YamlDocumentFormat TransformedDocumentsFormat = "yaml" +const HtmlDocumentFormat TransformedDocumentsFormat = "html" + +func ValidTransformedDocumentsFormat_deprecated(format string) bool { + switch format { + case string(JsonDocumentFormat), string(HtmlDocumentFormat): + return true + } + return false +} + +func ValidateTransformedDocumentsFormat(format string) error { + switch format { + case string(JsonDocumentFormat), string(HtmlDocumentFormat), string(YamlDocumentFormat): + return nil + } + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.InvalidParameterValue, + Message: exception.InvalidParameterValueMsg, + Params: map[string]interface{}{"param": "format", "value": format}, + } +} + +func ValidateFormatForBuildType(buildType string, format string) error { + err := ValidateGroupBuildType(buildType) + if err != nil { + return err + } + err = ValidateTransformedDocumentsFormat(format) + if err != nil { + return err + } + if buildType == MergedSpecificationType && format == string(HtmlDocumentFormat) { + return &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.FormatNotSupportedForBuildType, + Message: exception.FormatNotSupportedForBuildTypeMsg, + Params: map[string]interface{}{"format": format, "buildType": buildType}, + } + } + return nil +} + +type DocumentExtension string + +const JsonExtension DocumentExtension = "json" + +const ( + JsonFormat string = "json" + YamlFormat string = "yaml" + 
MDFormat string = "md" + GraphQLFormat string = "graphql" + GQLFormat string = "gql" + ProtobufFormat string = "proto" + UnknownFormat string = "unknown" +) + +func InvalidDocumentFormat(s string) bool { + switch s { + case JsonFormat, YamlFormat, MDFormat, GraphQLFormat, GQLFormat, ProtobufFormat, UnknownFormat: + return false + } + return true +} + +const ( + OpenAPI31Type string = "openapi-3-1" + OpenAPI30Type string = "openapi-3-0" + OpenAPI20Type string = "openapi-2-0" + Protobuf3Type string = "protobuf-3" + JsonSchemaType string = "json-schema" + MDType string = "markdown" + GraphQLSchemaType string = "graphql-schema" + GraphAPIType string = "graphapi" + IntrospectionType string = "introspection" + UnknownType string = "unknown" +) + +func InvalidDocumentType(documentType string) bool { + switch documentType { + case OpenAPI31Type, OpenAPI30Type, OpenAPI20Type, Protobuf3Type, JsonSchemaType, MDType, GraphQLSchemaType, GraphAPIType, IntrospectionType, UnknownType: + return false + } + return true +} diff --git a/qubership-apihub-service/view/Excel.go b/qubership-apihub-service/view/Excel.go new file mode 100644 index 0000000..3c3b542 --- /dev/null +++ b/qubership-apihub-service/view/Excel.go @@ -0,0 +1,40 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +const SummarySheetName = "Summary" +const RestAPISheetName = "REST API" +const GraphQLSheetName = "GraphQL" +const ProtobufSheetName = "Protobuf" +const PackageIDColumnName = "Package ID" +const PackageNameColumnName = "Package Name" +const ServiceNameColumnName = "Service Name" +const VersionColumnName = "Version" +const PreviousVersionColumnName = "Previous Version" +const APITypeColumnName = "API Type" +const OperationTitleColumnName = "Operation Title" +const OperationPathColumnName = "Operation Path" +const OperationMethodColumnName = "Operation Method" +const ChangeDescriptionColumnName = "Change Description" +const ChangeSeverityColumnName = "Change Severity" +const OperationTypeColumnName = "Operation Type" +const TagColumnName = "Tag" +const KindColumnName = "Kind" +const DeprecatedColumnName = "Deprecated" +const OperationActionColumnName = "Operation Action" +const DeprecatedSinceColumnName = "Deprecated Since" +const DeprecatedDescriptionColumnName = "Deprecated Description" +const AdditionalInformationColumnName = "Additional Information" +const APIKindColumnName = "API Kind" diff --git a/qubership-apihub-service/view/Export.go b/qubership-apihub-service/view/Export.go new file mode 100644 index 0000000..4fa6914 --- /dev/null +++ b/qubership-apihub-service/view/Export.go @@ -0,0 +1,52 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 

package view

// ExportApiChangesRequestView carries the filter parameters for exporting
// the API changes (changelog) of a package version.
type ExportApiChangesRequestView struct {
	PreviousVersion          string
	PreviousVersionPackageId string
	TextFilter               string
	Tags                     []string
	ApiKind                  string
	EmptyTag                 bool
	RefPackageId             string
	Group                    string
	EmptyGroup               bool
	ApiAudience              string
}

// ExportOperationRequestView carries the filter parameters for exporting
// the operation list of a package version.
type ExportOperationRequestView struct {
	EmptyTag     bool
	Kind         string
	Tag          string
	TextFilter   string
	Tags         []string
	RefPackageId string
	Group        string
	EmptyGroup   bool
	ApiAudience  string
}

// Declared export formats.
const ExportFormatXlsx = "xlsx"
const ExportFormatJson = "json"

// ValidateApiChangesExportFormat reports whether format is accepted for the
// API-changes export. NOTE(review): only xlsx is accepted here although
// ExportFormatJson is declared above — confirm whether json export is
// intentionally unsupported for API changes.
func ValidateApiChangesExportFormat(format string) bool {
	switch format {
	case ExportFormatXlsx:
		return true
	default:
		return false
	}
}
diff --git a/qubership-apihub-service/view/ExternalMetadata.go b/qubership-apihub-service/view/ExternalMetadata.go
new file mode 100644
index 0000000..550edb8
--- /dev/null
+++ b/qubership-apihub-service/view/ExternalMetadata.go
@@ -0,0 +1,30 @@
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package view + +type OperationExternalMetadataKey struct { + ApiType string `json:"apiType"` + Method string `json:"method"` + Path string `json:"path"` +} + +type OperationExternalMetadata struct { + OperationExternalMetadataKey + ExternalMetadata map[string]interface{} `json:"externalMetadata"` +} + +type ExternalMetadata struct { + Operations []OperationExternalMetadata `json:"operations"` +} diff --git a/qubership-apihub-service/view/FileChange.go b/qubership-apihub-service/view/FileChange.go new file mode 100644 index 0000000..27692ef --- /dev/null +++ b/qubership-apihub-service/view/FileChange.go @@ -0,0 +1,28 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import "time" + +type FileChange struct { + CommitId string `json:"commitId"` + ModifiedBy User `json:"modifiedBy"` + ModifiedAt time.Time `json:"modifiedAt"` + Comment string `json:"comment"` +} + +type Changes struct { + Changes []FileChange `json:"changes"` +} diff --git a/qubership-apihub-service/view/FileStatus.go b/qubership-apihub-service/view/FileStatus.go new file mode 100644 index 0000000..f49f3d4 --- /dev/null +++ b/qubership-apihub-service/view/FileStatus.go @@ -0,0 +1,70 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type FileStatus string + +const ( + StatusAdded FileStatus = "added" + StatusIncluded FileStatus = "included" + StatusDeleted FileStatus = "deleted" + StatusExcluded FileStatus = "excluded" + StatusModified FileStatus = "modified" + StatusMoved FileStatus = "moved" + StatusUnmodified FileStatus = "unmodified" +) + +func (f FileStatus) String() string { + switch f { + case StatusAdded: + return "added" + case StatusIncluded: + return "included" + case StatusDeleted: + return "deleted" + case StatusExcluded: + return "excluded" + case StatusModified: + return "modified" + case StatusMoved: + return "moved" + case StatusUnmodified: + return "unmodified" + default: + return "" + } + +} + +func ParseFileStatus(s string) FileStatus { + switch s { + case "added": + return StatusAdded + case "included": + return StatusIncluded + case "deleted": + return StatusDeleted + case "excluded": + return StatusExcluded + case "modified": + return StatusModified + case "moved": + return StatusMoved + case "unmodified": + return StatusUnmodified + default: + return "" + } +} diff --git a/qubership-apihub-service/view/GitBranch.go b/qubership-apihub-service/view/GitBranch.go new file mode 100644 index 0000000..7c4a245 --- /dev/null +++ b/qubership-apihub-service/view/GitBranch.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type GitBranch struct { + Name string `json:"name"` +} + +type GitBranches struct { + Branches []GitBranch `json:"branches"` +} diff --git a/qubership-apihub-service/view/GitCommit.go b/qubership-apihub-service/view/GitCommit.go new file mode 100644 index 0000000..493d939 --- /dev/null +++ b/qubership-apihub-service/view/GitCommit.go @@ -0,0 +1,25 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import "time" + +type GitCommit struct { + Id string + CommitterName string + CommitterEmail string + CommittedDate time.Time + Message string +} diff --git a/qubership-apihub-service/view/GitFiles.go b/qubership-apihub-service/view/GitFiles.go new file mode 100644 index 0000000..5bb1ef1 --- /dev/null +++ b/qubership-apihub-service/view/GitFiles.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type FileNode struct { + Name string `json:"name"` + IsFolder bool `json:"isFolder"` +} + +type ListFilesView struct { + Files []FileNode `json:"files"` +} diff --git a/qubership-apihub-service/view/GitIntegration.go b/qubership-apihub-service/view/GitIntegration.go new file mode 100644 index 0000000..ac4e558 --- /dev/null +++ b/qubership-apihub-service/view/GitIntegration.go @@ -0,0 +1,38 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import ( + "net/http" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/exception" +) + +type GitIntegrationType string + +const GitlabIntegration GitIntegrationType = "gitlab" +const UnknownIntegration GitIntegrationType = "unknown" + +func GitIntegrationTypeFromStr(str string) (GitIntegrationType, error) { + if str == "gitlab" { + return GitlabIntegration, nil + } + return UnknownIntegration, &exception.CustomError{ + Status: http.StatusBadRequest, + Code: exception.UnknownIntegrationType, + Message: exception.UnknownIntegrationTypeMsg, + Params: map[string]interface{}{"type": str}, + } +} diff --git a/qubership-apihub-service/view/GitRepository.go b/qubership-apihub-service/view/GitRepository.go new file mode 100644 index 0000000..6991789 --- /dev/null +++ b/qubership-apihub-service/view/GitRepository.go @@ -0,0 +1,30 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type GitRepository struct { + RepositoryId string `json:"repositoryId"` + Name string `json:"name"` + DefaultBranch string `json:"defaultBranch"` +} + +type GitGroup struct { + Name string `json:"name"` +} + +type RepositoriesList struct { + Repositories []GitRepository `json:"repositories"` + Groups []GitGroup `json:"groups"` +} diff --git a/qubership-apihub-service/view/GitVersionPublish.go b/qubership-apihub-service/view/GitVersionPublish.go new file mode 100644 index 0000000..8fddd95 --- /dev/null +++ b/qubership-apihub-service/view/GitVersionPublish.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type GitVersionPublish struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Status string `json:"status"` +} diff --git a/qubership-apihub-service/view/GraphQLOperation.go b/qubership-apihub-service/view/GraphQLOperation.go new file mode 100644 index 0000000..dc5f244 --- /dev/null +++ b/qubership-apihub-service/view/GraphQLOperation.go @@ -0,0 +1,63 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +const ( + QueryType string = "query" + MutationType string = "mutation" + SubscriptionType string = "subscription" +) + +func ValidGraphQLOperationType(typeValue string) bool { + switch typeValue { + case QueryType, MutationType, SubscriptionType: + return true + } + return false +} + +type GraphQLOperationMetadata struct { + Type string `json:"type"` + Method string `json:"method"` + Tags []string `json:"tags"` +} + +type GraphQLOperationSingleView struct { + SingleOperationView + GraphQLOperationMetadata +} + +type GraphQLOperationView struct { + OperationListView + GraphQLOperationMetadata +} +type DeprecateGraphQLOperationView struct { + DeprecatedOperationView + GraphQLOperationMetadata +} + +type GraphQLOperationComparisonChangelogView_deprecated struct { + OperationComparisonChangelogView_deprecated + GraphQLOperationMetadata +} + +type GraphQLOperationComparisonChangelogView struct { + OperationComparisonChangelogView + GraphQLOperationMetadata +} +type GraphQLOperationComparisonChangesView struct { + OperationComparisonChangesView + GraphQLOperationMetadata +} diff --git a/qubership-apihub-service/view/Group.go b/qubership-apihub-service/view/Group.go new file mode 100644 index 0000000..1ab7e4d --- /dev/null +++ b/qubership-apihub-service/view/Group.go @@ -0,0 +1,54 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import "time" + +type Group struct { + Id string `json:"groupId"` + Name string `json:"name" validate:"required"` + Alias string `json:"alias" validate:"required"` // short alias + ParentId string `json:"parentId"` + ImageUrl string `json:"imageUrl"` + Description string `json:"description"` + CreatedBy string `json:"-"` + CreatedAt time.Time `json:"-"` + DeletedAt *time.Time `json:"-"` + DeletedBy string `json:"-"` + IsFavorite bool `json:"isFavorite"` + LastVersion string `json:"lastVersion,omitempty"` // Required only for group list +} + +type GroupInfo struct { + GroupId string `json:"groupId"` + ParentId string `json:"parentId"` + Name string `json:"name"` + Alias string `json:"alias"` // short alias + ImageUrl string `json:"imageUrl"` + Parents []Group `json:"parents"` + IsFavorite bool `json:"isFavorite"` + LastVersion string `json:"lastVersion,omitempty"` +} + +type Groups struct { + Groups []Group `json:"groups"` +} + +type PublishGroupRequest struct { + Version string `json:"version"` + PreviousVersion string `json:"previousVersion"` + Status string `json:"status"` + Refs []Ref `json:"refs"` +} diff --git a/qubership-apihub-service/view/Integration.go b/qubership-apihub-service/view/Integration.go new file mode 100644 index 0000000..a5d9123 --- /dev/null +++ b/qubership-apihub-service/view/Integration.go @@ -0,0 +1,32 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ExternalIntegration string + +const ExternalSamlIntegration ExternalIntegration = "saml" +const ExternalGitlabIntegration ExternalIntegration = "gitlab" +const ExternalLdapIntegration ExternalIntegration = "ldap" + +func GetIntegrationExternalId(user User, integration ExternalIntegration) string { + switch integration { + case ExternalSamlIntegration, + ExternalGitlabIntegration, + ExternalLdapIntegration: + return user.Id + default: + return "" + } +} diff --git a/qubership-apihub-service/view/LdapAttributes.go b/qubership-apihub-service/view/LdapAttributes.go new file mode 100644 index 0000000..b171575 --- /dev/null +++ b/qubership-apihub-service/view/LdapAttributes.go @@ -0,0 +1,21 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +const Mail string = "mail" +const DisplayName string = "displayName" +const Surname string = "sn" +const SAMAccountName string = "sAMAccountName" +const ThumbnailPhoto string = "thumbnailPhoto" diff --git a/qubership-apihub-service/view/Minio.go b/qubership-apihub-service/view/Minio.go new file mode 100644 index 0000000..ecc5834 --- /dev/null +++ b/qubership-apihub-service/view/Minio.go @@ -0,0 +1,28 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type MinioStorageCreds struct { + BucketName string + IsActive bool + Endpoint string + Crt string + AccessKeyId string + SecretAccessKey string + IsOnlyForBuildResult bool +} + +const PUBLISHED_SOURCES_ARCHIVES_TABLE = "published_sources_archives" +const BUILD_RESULT_TABLE = "build_result" diff --git a/qubership-apihub-service/view/Monitoring.go b/qubership-apihub-service/view/Monitoring.go new file mode 100644 index 0000000..7fa637e --- /dev/null +++ b/qubership-apihub-service/view/Monitoring.go @@ -0,0 +1,38 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type SearchEndpointOpts struct { + SearchLevel string `json:"searchLevel,omitempty"` + ApiType string `json:"apiType,omitempty"` + Scopes []string `json:"scope,omitempty"` + DetailedScopes []string `json:"detailedScope,omitempty"` + Methods []string `json:"methods,omitempty"` + OperationTypes []string `json:"operationTypes,omitempty"` +} + +func MakeSearchEndpointOptions(searchLevel string, operationSearchParams *OperationSearchParams) SearchEndpointOpts { + searchOpts := SearchEndpointOpts{ + SearchLevel: searchLevel, + } + if operationSearchParams != nil { + searchOpts.ApiType = operationSearchParams.ApiType + searchOpts.Scopes = operationSearchParams.Scopes + searchOpts.DetailedScopes = operationSearchParams.DetailedScopes + searchOpts.Methods = operationSearchParams.Methods + searchOpts.OperationTypes = operationSearchParams.OperationTypes + } + return searchOpts +} diff --git a/qubership-apihub-service/view/Oauth2.go b/qubership-apihub-service/view/Oauth2.go new file mode 100644 index 0000000..154749e --- /dev/null +++ b/qubership-apihub-service/view/Oauth2.go @@ -0,0 +1,36 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import "time" + +type OAuthAccessResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresIn int `json:"expires_in"` + RefreshToken string `json:"refresh_token"` + CreatedAt int `json:"created_at"` + Error string `json:"error"` +} + +func GetTokenExpirationDate(expiresIn int) time.Time { + var duration time.Duration + if expiresIn == 0 { + duration = time.Hour * 2 //default gitlab token expiration time + } else { + duration = time.Duration(expiresIn) * time.Second + } + return time.Now().Add(duration) +} diff --git a/qubership-apihub-service/view/OperationGroup.go b/qubership-apihub-service/view/OperationGroup.go new file mode 100644 index 0000000..b3c16c5 --- /dev/null +++ b/qubership-apihub-service/view/OperationGroup.go @@ -0,0 +1,109 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "fmt" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" +) + +const OperationGroupOperationsLimit = 5000 +const OperationGroupActionCreate = "create" +const OperationGroupActionUpdate = "update" +const OperationGroupActionDelete = "delete" + +type CreateOperationGroupReq_deprecated struct { + GroupName string `json:"groupName" validate:"required"` + Description string `json:"description"` +} + +type ReplaceOperationGroupReq_deprecated struct { + CreateOperationGroupReq_deprecated + Operations []GroupOperations `json:"operations" validate:"dive,required"` +} + +type CreateOperationGroupReq struct { + GroupName string `json:"groupName" validate:"required"` + Description string `json:"description"` + Template []byte `json:"template"` + TemplateFilename string `json:"templateFilename"` +} + +type ReplaceOperationGroupReq struct { + CreateOperationGroupReq + Operations []GroupOperations `json:"operations" validate:"dive,required"` +} + +type UpdateOperationGroupReq_deprecated struct { + GroupName *string `json:"groupName"` + Description *string `json:"description"` +} + +type UpdateOperationGroupReq struct { + GroupName *string + Description *string + Template *OperationGroupTemplate + Operations *[]GroupOperations `json:"operations" validate:"dive,required"` +} + +type OperationGroupTemplate struct { + TemplateData []byte + TemplateFilename string +} + +type GroupOperations struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + OperationId string `json:"operationId" validate:"required"` +} + +type OperationGroups struct { + OperationGroups []OperationGroup `json:"operationGroups"` +} + +type OperationGroup struct { + GroupName string `json:"groupName"` + Description string `json:"description,omitempty"` + IsPrefixGroup bool `json:"isPrefixGroup"` + OperationsCount int `json:"operationsCount"` +} + +type CalculatedOperationGroups struct { + Groups []string `json:"groups"` +} + 
+func MakeOperationGroupId(packageId string, version string, revision int, apiType string, groupName string) string { + uniqueString := fmt.Sprintf("%v@%v@%v@%v@%v", packageId, version, revision, apiType, groupName) + return utils.GetEncodedChecksum([]byte(uniqueString)) +} + +type OperationGroupPublishReq struct { + PackageId string `json:"packageId" validate:"required"` + Version string `json:"version" validate:"required"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Status string `json:"status" validate:"required"` + VersionLabels []string `json:"versionLabels"` +} + +type OperationGroupPublishResp struct { + PublishId string `json:"publishId"` +} + +type OperationGroupPublishStatusResponse struct { + Status string `json:"status"` + Message string `json:"message"` +} diff --git a/qubership-apihub-service/view/Package.go b/qubership-apihub-service/view/Package.go new file mode 100644 index 0000000..6c661d1 --- /dev/null +++ b/qubership-apihub-service/view/Package.go @@ -0,0 +1,341 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import ( + "fmt" + "strings" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" +) + +type Package struct { + Id string `json:"projectId"` //todo replace with packageId + GroupId string `json:"groupId"` //todo replace with parentId + Name string `json:"name"` + Alias string `json:"alias"` + Description string `json:"description"` + IsFavorite bool `json:"isFavorite"` + Groups []Group `json:"groups"` + DeletionDate *time.Time `json:"-"` + DeletedBy string `json:"-"` + CreatedBy string `json:"-"` + CreatedAt time.Time `json:"-"` + ServiceName string `json:"serviceName,omitempty"` + LastVersion string `json:"lastVersion,omitempty"` +} + +type Packages_deprecated struct { + Packages []Package `json:"projects"` //todo replace with packages +} + +type PackageInfo struct { + PackageId string `json:"packageId" validate:"required"` + ParentId string `json:"-"` + Alias string `json:"-"` + Version string `json:"version" validate:"required"` + ServiceName string `json:"serviceName"` + Folder string `json:"folder"` + PackageName string `json:"packageName"` // TODO: not used? 
+ PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Status string `json:"status" validate:"required"` + Refs []PackageInfoRef `json:"refs" validate:"dive,required"` + Files []PackageInfoFile `json:"files" validate:"dive,required"` + VersionLabels []string `json:"versionLabels"` + BranchName string `json:"branchName,omitempty"` + RepositoryUrl string `json:"repositoryUrl,omitempty"` +} + +type PackageInfoRef struct { + RefPackageId string `json:"refId" validate:"required"` + RefVersion string `json:"version" validate:"required"` +} + +type SimplePackage struct { + Id string `json:"packageId"` + Alias string `json:"alias" validate:"required"` + ParentId string `json:"parentId"` + Kind string `json:"kind" validate:"required"` + Name string `json:"name" validate:"required"` + Description string `json:"description"` + IsFavorite bool `json:"isFavorite"` + ServiceName string `json:"serviceName,omitempty"` + ImageUrl string `json:"imageUrl"` + Parents []ParentPackageInfo `json:"parents"` + DefaultRole string `json:"defaultRole"` + UserPermissions []string `json:"permissions"` + DeletionDate *time.Time `json:"-"` + DeletedBy string `json:"-"` + CreatedBy string `json:"-"` + CreatedAt time.Time `json:"-"` + DefaultReleaseVersion string `json:"defaultReleaseVersion"` + DefaultVersion string `json:"defaultVersion"` + ReleaseVersionPattern string `json:"releaseVersionPattern"` + ExcludeFromSearch *bool `json:"excludeFromSearch,omitempty"` + RestGroupingPrefix string `json:"restGroupingPrefix,omitempty"` +} + +type GlobalPackage struct { + PackageId string `json:"packageId"` + Name string `json:"name"` + Description string `json:"description"` + ParentPackages []SimplePackage `json:"parentPackages"` +} + +type Packages struct { + Packages []PackagesInfo `json:"packages"` +} + +type PackagesInfo struct { + Id string `json:"packageId"` + Alias string `json:"alias"` + ParentId string `json:"parentId"` + Kind 
string `json:"kind"` + Name string `json:"name"` + Description string `json:"description"` + IsFavorite bool `json:"isFavorite"` + ServiceName string `json:"serviceName,omitempty"` + ImageUrl string `json:"imageUrl"` + Parents []ParentPackageInfo `json:"parents"` + DefaultRole string `json:"defaultRole"` + UserPermissions []string `json:"permissions"` + LastReleaseVersionDetails *VersionDetails `json:"lastReleaseVersionDetails,omitempty"` + RestGroupingPrefix string `json:"restGroupingPrefix,omitempty"` + ReleaseVersionPattern string `json:"releaseVersionPattern,omitempty"` +} + +type ParentPackageInfo struct { + Id string `json:"packageId"` + Alias string `json:"alias"` + ParentId string `json:"parentId"` + Kind string `json:"kind"` + Name string `json:"name"` + ImageUrl string `json:"imageUrl"` + HasReadPermission *bool `json:"hasReadPermission,omitempty"` +} + +type VersionDetails struct { + Version string `json:"version"` + NotLatestRevision bool `json:"notLatestRevision,omitempty"` + Summary *ChangeSummary `json:"summary,omitempty"` +} +type PackageListReq struct { + Kind []string + Limit int + OnlyFavorite bool + OnlyShared bool + Offset int + ParentId string + ShowParents bool + TextFilter string + LastReleaseVersionDetails bool + ServiceName string + ShowAllDescendants bool + Ids []string +} + +type PatchPackageReq struct { + Name *string `json:"name"` + Description *string `json:"description"` + ServiceName *string `json:"serviceName"` + ImageUrl *string `json:"imageUrl"` + DefaultRole *string `json:"defaultRole"` + DefaultReleaseVersion *string `json:"defaultReleaseVersion"` + ReleaseVersionPattern *string `json:"releaseVersionPattern"` + ExcludeFromSearch *bool `json:"excludeFromSearch"` + RestGroupingPrefix *string `json:"restGroupingPrefix"` +} + +// build result +type PackageInfoFile struct { + PackageId string `json:"packageId" validate:"required"` + Kind string `json:"-"` + BuildType string `json:"buildType"` + Version string `json:"version" 
validate:"required"` + Status string `json:"status" validate:"required"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + Metadata map[string]interface{} `json:"metadata"` + Refs []BCRef `json:"refs"` + Revision int `json:"-"` + PreviousVersionRevision int `json:"-"` + CreatedBy string `json:"createdBy"` + BuilderVersion string `json:"builderVersion"` + PublishedAt *time.Time `json:"publishedAt"` //for migration + MigrationBuild bool `json:"migrationBuild"` //for migration + MigrationId string `json:"migrationId"` //for migration + NoChangelog bool `json:"noChangeLog,omitempty"` //for migration + ApiType string `json:"apiType"` + GroupName string `json:"groupName"` + Format string `json:"format"` + ExternalMetadata *ExternalMetadata `json:"externalMetadata,omitempty"` +} + +type ChangelogInfoFile struct { + BuildType string `json:"buildType"` + PackageId string `json:"packageId" validate:"required"` + Version string `json:"version" validate:"required"` + PreviousVersionPackageId string `json:"previousVersionPackageId" validate:"required"` + PreviousVersion string `json:"previousVersion" validate:"required"` + Metadata map[string]interface{} `json:"metadata"` + Revision int `json:"revision"` + PreviousVersionRevision int `json:"previousVersionRevision"` + CreatedBy string `json:"createdBy"` + BuilderVersion string `json:"builderVersion"` + PublishedAt *time.Time `json:"publishedAt"` //for migration +} + +func MakeChangelogInfoFileView(packageInfo PackageInfoFile) ChangelogInfoFile { + return ChangelogInfoFile{ + BuildType: packageInfo.BuildType, + PackageId: packageInfo.PackageId, + Version: packageInfo.Version, + PreviousVersionPackageId: packageInfo.PreviousVersionPackageId, + PreviousVersion: packageInfo.PreviousVersion, + Metadata: packageInfo.Metadata, + Revision: packageInfo.Revision, + PreviousVersionRevision: packageInfo.PreviousVersionRevision, + CreatedBy: packageInfo.CreatedBy, + 
BuilderVersion: packageInfo.BuilderVersion, + PublishedAt: packageInfo.PublishedAt, + } +} + +type PackageOperationsFile struct { + Operations []Operation `json:"operations" validate:"dive,required"` +} + +type PackageDocumentsFile struct { + Documents []PackageDocument `json:"documents" validate:"dive,required"` +} + +type PackageOperationChanges struct { + OperationComparisons []OperationComparison `json:"operations" validate:"dive,required"` +} + +type PackageComparisonsFile struct { + Comparisons []VersionComparison `json:"comparisons" validate:"dive,required"` +} + +type VersionComparison struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + Revision int `json:"revision"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionRevision int `json:"previousVersionRevision"` + OperationTypes []OperationType `json:"operationTypes" validate:"required,dive,required"` + FromCache bool `json:"fromCache"` + ComparisonFileId string `json:"comparisonFileId"` +} + +func MakeVersionComparisonId(packageId string, version string, revision int, previousVersionPackageId string, previousVersion string, previousVersionRevision int) string { + uniqueString := fmt.Sprintf("%v@%v@%v@%v@%v@%v", packageId, version, revision, previousVersionPackageId, previousVersion, previousVersionRevision) + return utils.GetEncodedChecksum([]byte(uniqueString)) +} + +type OperationType struct { + ApiType string `json:"apiType" validate:"required"` + ChangesSummary ChangeSummary `json:"changesSummary" validate:"required"` + NumberOfImpactedOperations ChangeSummary `json:"numberOfImpactedOperations"` + ApiAudienceTransitions []ApiAudienceTransition `json:"apiAudienceTransitions,omitempty"` + Tags []string `json:"tags"` +} + +type ApiAudienceTransition struct { + CurrentAudience string `json:"currentAudience"` + PreviousAudience string `json:"previousAudience"` + OperationsCount int 
// …tail of a struct truncated at the chunk boundary; its final field carried
// the tag `json:"operationsCount"` and was followed by the closing brace.

// BuilderNotificationsFile is the envelope for notifications produced by a build.
type BuilderNotificationsFile struct {
	Notifications []BuilderNotification `json:"notifications" validate:"dive,required"`
}

// PackageRef identifies a referenced package at a specific version.
type PackageRef struct {
	RefId   string `json:"refId"`
	Version string `json:"version"`
}

// PackageDocument describes one published document of a package version.
type PackageDocument struct {
	FileId       string                 `json:"fileId" validate:"required"`
	Type         string                 `json:"type" validate:"required"`
	Slug         string                 `json:"slug" validate:"required"`
	Title        string                 `json:"title" validate:"required"`
	Description  string                 `json:"description"`
	Version      string                 `json:"version"`
	OperationIds []string               `json:"operationIds" validate:"required"`
	Metadata     map[string]interface{} `json:"metadata"`
	Filename     string                 `json:"filename" validate:"required"`
	Format       string                 `json:"format"`
}

// BuilderNotification is a single message emitted by the builder for a file.
type BuilderNotification struct {
	Severity int    `json:"severity"`
	Message  string `json:"message"`
	FileId   string `json:"fileId"`
}

// PackageGroupingPrefixWildcard is the placeholder that stands for an
// arbitrary group segment inside a grouping prefix.
const PackageGroupingPrefixWildcard = "{group}"

// regexpEscaped returns s with every regexp metacharacter prefixed by a
// backslash, so the result matches s literally inside a pattern.
func regexpEscaped(s string) string {
	const reserved = `\!$()*+.:<=>?[]^{|}-`
	var out strings.Builder
	out.Grow(2 * len(s))
	for _, r := range s {
		if strings.ContainsRune(reserved, r) {
			out.WriteByte('\\')
		}
		out.WriteRune(r)
	}
	return out.String()
}

// MakePackageGroupingPrefixRegex turns a grouping prefix (possibly containing
// PackageGroupingPrefixWildcard) into an anchored regular expression in which
// the wildcard becomes a lazy capturing group.
func MakePackageGroupingPrefixRegex(groupingPrefix string) string {
	escaped := regexpEscaped(groupingPrefix)
	escaped = strings.Replace(escaped, regexpEscaped(PackageGroupingPrefixWildcard), `(.*?)`, 1)
	return "^" + escaped
}

// MakePackageRefKey builds the "<package>@<version>@<revision>" key; it
// returns "" when any component is missing (revision 0 counts as missing).
func MakePackageRefKey(packageId string, version string, revision int) string {
	if packageId == "" || version == "" || revision == 0 {
		return ""
	}
	return fmt.Sprintf("%v@%v@%v", packageId, version, revision)
}

// MakeVersionRefKey builds the "<version>@<revision>" key, or "" when either
// component is missing.
func MakeVersionRefKey(version string, revision int) string {
	if version == "" || revision == 0 {
		return ""
	}
	return fmt.Sprintf("%v@%v", version, revision)
}

// MakePackageVersionRefKey builds the "<package>@<version>" key, or "" when
// either component is missing.
func MakePackageVersionRefKey(packageId string, version string) string {
	if packageId == "" || version == "" {
		return ""
	}
	return fmt.Sprintf("%v@%v", packageId, version)
}

// PackageV2 is the v2 create/update payload for a package or group.
type PackageV2 struct {
	Id          string `json:"id"`
	Alias       string `json:"alias" validate:"required"`
	Name        string `json:"name" validate:"required"`
	Kind        string `json:"kind" validate:"required"`
	ParentId    string `json:"parentId" validate:"required"`
	Description string `json:"description"`
	ServiceName string `json:"serviceName"`
	ImageUrl    string `json:"imageUrl"`
}

// ==== new file: qubership-apihub-service/view/PackageMemberRole.go (package view) ====
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package view + +const ActionAddRole = "add" +const ActionRemoveRole = "remove" + +type PackageMemberRoleView struct { + RoleId string `json:"roleId"` + RoleName string `json:"role"` + Inheritance *ShortPackage `json:"inheritance,omitempty"` +} + +type PackageMember struct { + User User `json:"user"` + Roles []PackageMemberRoleView `json:"roles"` +} + +type PackageMembers struct { + Members []PackageMember `json:"members"` +} + +type ShortPackage struct { + PackageId string `json:"packageId"` + Kind string `json:"kind"` + Name string `json:"name"` +} + +type AvailablePackagePromoteStatuses map[string][]string // map[packageId][]version status + +type PackageMembersAddReq struct { + Emails []string `json:"emails" validate:"required"` + RoleIds []string `json:"roleIds" validate:"required"` +} + +type PackageMemberUpdatePatch struct { + RoleId string `json:"roleId" validate:"required"` + Action string `json:"action" validate:"required"` +} + +type PackagesReq struct { + Packages []string `json:"packages"` +} diff --git a/qubership-apihub-service/view/Paging.go b/qubership-apihub-service/view/Paging.go new file mode 100644 index 0000000..2f1e15a --- /dev/null +++ b/qubership-apihub-service/view/Paging.go @@ -0,0 +1,28 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type Pageable struct { + Content interface{} `json:"content"` + TotalItems int `json:"totalItems"` + TotalPages int `json:"totalPages"` + ItemsPerPage int `json:"itemsPerPage"` + CurrentPage int `json:"currentPage"` +} + +type PagingParams struct { + Page int `json:"page"` + ItemsPerPage int `json:"itemsPerPage"` +} diff --git a/qubership-apihub-service/view/Portal.go b/qubership-apihub-service/view/Portal.go new file mode 100644 index 0000000..4e7f189 --- /dev/null +++ b/qubership-apihub-service/view/Portal.go @@ -0,0 +1,60 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type DocumentationType string + +const DTInteractive DocumentationType = "INTERACTIVE" +const DTStatic DocumentationType = "STATIC" +const DTPdf DocumentationType = "PDF" +const DTRaw DocumentationType = "RAW" + +func GetDtFromStr(str string) DocumentationType { + switch str { + case "INTERACTIVE": + return DTInteractive + case "STATIC": + return DTStatic + case "PDF": + return DTPdf + case "RAW": + return DTRaw + case "": + return DTInteractive + } + return DocumentationType(str) +} + +type VersionDocMetadata struct { + GitLink string `json:"gitLink"` + Branch string `json:"branch"` + DateOfPublication string `json:"dateOfPublication"` + CommitId string `json:"commitId"` + Version string `json:"version"` + Revision int `json:"revision"` + User string `json:"user"` + Labels []string `json:"labels"` + Files []FileMetadata `json:"files"` +} + +type FileMetadata struct { + Type string `json:"type"` + Name string `json:"name"` // title + Format string `json:"format"` + Slug string `json:"slug"` + Labels []string `json:"labels,omitempty"` + Openapi *Openapi `json:"openapi,omitempty"` + Asyncapi *Asyncapi `json:"asyncapi,omitempty"` +} diff --git a/qubership-apihub-service/view/Principal.go b/qubership-apihub-service/view/Principal.go new file mode 100644 index 0000000..972f811 --- /dev/null +++ b/qubership-apihub-service/view/Principal.go @@ -0,0 +1,28 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type PrincipalUserView struct { + PrincipalType PrincipalType `json:"type"` + User +} +type PrincipalApiKeyView struct { + PrincipalType PrincipalType `json:"type"` + ApiKey +} +type PrincipalType string + +const PTUser PrincipalType = "user" +const PTApiKey PrincipalType = "apiKey" diff --git a/qubership-apihub-service/view/Project.go b/qubership-apihub-service/view/Project.go new file mode 100644 index 0000000..f2bc16e --- /dev/null +++ b/qubership-apihub-service/view/Project.go @@ -0,0 +1,50 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import "time" + +type Project struct { + Id string `json:"projectId"` + GroupId string `json:"groupId" validate:"required"` + Name string `json:"name" validate:"required"` + Alias string `json:"alias" validate:"required"` // short alias + Description string `json:"description"` + IsFavorite bool `json:"isFavorite"` + Integration IntegrationView `json:"integration"` + Groups []Group `json:"groups"` + DeletionDate *time.Time `json:"-"` + DeletedBy string `json:"-"` + LastVersion string `json:"lastVersion,omitempty"` + + PackageId string `json:"packageId"` +} + +type IntegrationView struct { + Type GitIntegrationType `json:"type" validate:"required"` + RepositoryId string `json:"repositoryId" validate:"required"` + RepositoryName string `json:"repositoryName"` + RepositoryUrl string `json:"repositoryUrl"` + DefaultBranch string `json:"defaultBranch" validate:"required"` + DefaultFolder string `json:"defaultFolder" validate:"required"` +} + +type Projects struct { + Projects []Project `json:"projects"` +} + +type GitLabWebhookIntegration struct { + SecretToken string `json:"secretToken" validate:"required"` +} diff --git a/qubership-apihub-service/view/ProtobufOperation.go b/qubership-apihub-service/view/ProtobufOperation.go new file mode 100644 index 0000000..06de9d1 --- /dev/null +++ b/qubership-apihub-service/view/ProtobufOperation.go @@ -0,0 +1,58 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +const ( + UnaryType string = "unary" + ServerStreamingType string = "serverStreaming" + ClientStreamingType string = "clientStreaming" + BidirectionalStreamingType string = "bidirectionalStreaming" +) + +func ValidProtobufOperationType(typeValue string) bool { + switch typeValue { + case UnaryType, ServerStreamingType, ClientStreamingType, BidirectionalStreamingType: + return true + } + return false +} + +type ProtobufOperationMetadata struct { + Type string `json:"type"` + Method string `json:"method"` +} + +type ProtobufOperationSingleView struct { + SingleOperationView + ProtobufOperationMetadata +} + +type ProtobufOperationView struct { + OperationListView + ProtobufOperationMetadata +} +type DeprecateProtobufOperationView struct { + DeprecatedOperationView + ProtobufOperationMetadata +} + +type ProtobufOperationComparisonChangelogView struct { + OperationComparisonChangelogView + ProtobufOperationMetadata +} +type ProtobufOperationComparisonChangesView struct { + OperationComparisonChangesView + ProtobufOperationMetadata +} diff --git a/qubership-apihub-service/view/PublicKey.go b/qubership-apihub-service/view/PublicKey.go new file mode 100644 index 0000000..709cc89 --- /dev/null +++ b/qubership-apihub-service/view/PublicKey.go @@ -0,0 +1,19 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type PublicKey struct { + Value []byte +} diff --git a/qubership-apihub-service/view/PublishV2.go b/qubership-apihub-service/view/PublishV2.go new file mode 100644 index 0000000..afc224a --- /dev/null +++ b/qubership-apihub-service/view/PublishV2.go @@ -0,0 +1,20 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type PublishV2Response struct { + PublishId string `json:"publishId"` + Config *BuildConfig `json:"config,omitempty"` +} diff --git a/qubership-apihub-service/view/PublishedBranchView.go b/qubership-apihub-service/view/PublishedBranchView.go new file mode 100644 index 0000000..f7052bb --- /dev/null +++ b/qubership-apihub-service/view/PublishedBranchView.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +import "time" + +type BranchItemView struct { + Name string `json:"name"` + Version string `json:"version,omitempty"` + Status VersionStatus `json:"status,omitempty"` + PublishedAt *time.Time `json:"publishedAt,omitempty"` + Permissions []string `json:"permissions"` +} + +type BranchListView struct { + Branches []BranchItemView `json:"branches"` +} diff --git a/qubership-apihub-service/view/PublishedContent.go b/qubership-apihub-service/view/PublishedContent.go new file mode 100644 index 0000000..c898243 --- /dev/null +++ b/qubership-apihub-service/view/PublishedContent.go @@ -0,0 +1,132 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type PublishedContent struct { + ContentId string `json:"fileId"` + Type ShortcutType `json:"type"` + Format string `json:"format"` + Path string `json:"-"` + Name string `json:"-"` + Index int `json:"-"` + Slug string `json:"slug"` + Labels []string `json:"labels,omitempty"` + Title string `json:"title,omitempty"` + Version string `json:"version,omitempty"` + ReferenceId string `json:"refId,omitempty"` + Openapi *Openapi `json:"openapi,omitempty"` + Asyncapi *Asyncapi `json:"asyncapi,omitempty"` +} + +type PublishedContentInfo struct { + FileId string + Checksum string +} + +type SharedUrlResult_deprecated struct { + SharedId string `json:"sharedId"` +} + +// deprecated +type PublishedDocument_deprecated struct { + FieldId string `json:"fileId"` + Slug string `json:"slug"` + Type string `json:"type"` + Format string `json:"format"` + Title string `json:"title,omitempty"` + Labels []string `json:"labels,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Info interface{} `json:"info,omitempty"` + ExternalDocs interface{} `json:"externalDocs,omitempty"` + Operations []DocumentsOperation_deprecated `json:"operations,omitempty"` + Filename string `json:"filename"` + Tags []interface{} `json:"tags"` +} + +type PublishedDocument struct { + FieldId string `json:"fileId"` + Slug string `json:"slug"` + Type string `json:"type"` + Format string `json:"format"` + Title string `json:"title,omitempty"` + Labels []string `json:"labels,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Info interface{} `json:"info,omitempty"` + ExternalDocs interface{} `json:"externalDocs,omitempty"` + Operations []interface{} `json:"operations,omitempty"` + Filename string `json:"filename"` + Tags []interface{} `json:"tags"` +} + +type PublishedDocumentRefView struct { + FieldId string `json:"fileId"` + Slug string `json:"slug"` + Type string `json:"type"` + 
Format string `json:"format"` + Title string `json:"title,omitempty"` + Labels []string `json:"labels,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Filename string `json:"filename"` + PackageRef string `json:"packageRef"` + IncludedOperationIds []string `json:"includedOperationIds"` +} + +type DocumentsForTransformationView struct { + Documents []DocumentForTransformationView `json:"documents"` +} + +type DocumentForTransformationView struct { + FieldId string `json:"fileId"` + Slug string `json:"slug"` + Type string `json:"type"` + Format string `json:"format"` + Title string `json:"title,omitempty"` + Labels []string `json:"labels,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Filename string `json:"filename"` + IncludedOperationIds []string `json:"includedOperationIds"` + Data []byte `json:"data"` +} + +type Openapi struct { + Operations []OpenapiOperation `json:"operations,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Title string `json:"title"` +} + +type OpenapiOperation struct { + Path string `json:"path"` + Method string `json:"method"` + Tile string `json:"tile"` + Tags []string `json:"tags"` +} + +type Asyncapi struct { + Operations []AsyncapiOperation `json:"operations,omitempty"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Title string `json:"title"` +} + +type AsyncapiOperation struct { + Channel string `json:"channel"` + Method string `json:"method"` + Tags []string `json:"tags"` +} diff --git a/qubership-apihub-service/view/PublishedContentChange.go b/qubership-apihub-service/view/PublishedContentChange.go new file mode 100644 index 0000000..d2873ff --- /dev/null +++ b/qubership-apihub-service/view/PublishedContentChange.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// 
Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type PublishedContentChange struct { + FileId string `json:"fileId"` + Type ShortcutType `json:"type"` + Title string `json:"title"` + Slug string `json:"slug"` + Checksum string `json:"-"` +} diff --git a/qubership-apihub-service/view/PublishedRef.go b/qubership-apihub-service/view/PublishedRef.go new file mode 100644 index 0000000..f67f3c8 --- /dev/null +++ b/qubership-apihub-service/view/PublishedRef.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type PublishedRef struct { + PackageId string `json:"refId"` + Kind string `json:"kind"` + Name string `json:"name"` + Version string `json:"version"` + VersionStatus string `json:"versionStatus"` + Alias string `json:"alias"` +} diff --git a/qubership-apihub-service/view/PublishedVersion.go b/qubership-apihub-service/view/PublishedVersion.go new file mode 100644 index 0000000..0509c7c --- /dev/null +++ b/qubership-apihub-service/view/PublishedVersion.go @@ -0,0 +1,77 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import "time" + +type PublishedVersion struct { + PackageId string `json:"-"` + Version string `json:"-"` + Status VersionStatus `json:"status"` + PublishedAt time.Time `json:"publishedAt"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` + Changes Validation `json:"changes,omitempty"` + Validations ValidationsMap `json:"validations,omitempty"` + DeletedAt *time.Time `json:"-"` + RelatedPackages []PublishedRef `json:"refs"` + Contents []PublishedContent `json:"files"` + Revision int `json:"-"` + BranchName string `json:"-"` + VersionLabels []string `json:"versionLabels"` +} + +type PublishedShortVersion struct { + PackageId string `json:"-"` + Version string `json:"-"` + Status VersionStatus `json:"status"` + PublishedAt time.Time `json:"publishedAt"` +} + +type PublishedVersionListView_deprecated struct { + Version string `json:"version"` + Status VersionStatus `json:"status"` + PublishedAt time.Time `json:"publishedAt"` + PreviousVersion string `json:"previousVersion"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` + Revision int `json:"revision"` +} + +type PublishedVersions struct { + Versions []PublishedVersion `json:"versions"` +} + +type PublishedVersionsView_deprecated struct { + Versions []PublishedVersionListView_deprecated `json:"versions"` +} + +type PublishedVersionHistoryView struct { + PackageId string `json:"packageId"` + Version string `json:"version"` + Revision int `json:"revision"` + Status string `json:"status"` + PreviousVersionPackageId string `json:"previousVersionPackageId"` + PreviousVersion string `json:"previousVersion"` + PublishedAt time.Time `json:"publishedAt"` + ApiTypes []string `json:"apiTypes"` +} + +type PublishedVersionHistoryFilter struct { + PublishedAfter *time.Time + PublishedBefore *time.Time + Status *string + Limit int + Page int +} diff --git a/qubership-apihub-service/view/Ref.go 
b/qubership-apihub-service/view/Ref.go new file mode 100644 index 0000000..da218fa --- /dev/null +++ b/qubership-apihub-service/view/Ref.go @@ -0,0 +1,52 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type Ref struct { + RefPackageId string `json:"refId" msgpack:"refId"` + RefPackageName string `json:"name" msgpack:"name"` + RefPackageVersion string `json:"version" msgpack:"version"` + Status FileStatus `json:"status" msgpack:"status"` + VersionStatus string `json:"versionStatus" msgpack:"versionStatus"` + Kind string `json:"kind" msgpack:"kind"` + IsBroken bool `json:"isBroken" msgpack:"isBroken"` //TODO: Need to support all over the system +} + +type RefGitConfigView struct { + RefPackageId string `json:"refId"` + Version string `json:"version"` +} + +func TransformRefToGitView(ref Ref) RefGitConfigView { + return RefGitConfigView{ + RefPackageId: ref.RefPackageId, + Version: ref.RefPackageVersion, + } +} + +func TransformGitViewToRef(ref RefGitConfigView, refPackageName string, status string, kind string) Ref { + return Ref{ + RefPackageId: ref.RefPackageId, + RefPackageName: refPackageName, + RefPackageVersion: ref.Version, + Status: StatusUnmodified, + VersionStatus: status, + Kind: kind, + } +} + +func (r *Ref) EqualsGitView(r2 *Ref) bool { + return r.RefPackageId == r2.RefPackageId && r.RefPackageVersion == r2.RefPackageVersion +} diff --git 
a/qubership-apihub-service/view/RefChange.go b/qubership-apihub-service/view/RefChange.go new file mode 100644 index 0000000..99393b0 --- /dev/null +++ b/qubership-apihub-service/view/RefChange.go @@ -0,0 +1,21 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type RefChange struct { + Before RefChangeView `json:"before"` + After RefChangeView `json:"after"` + Status FileStatus `json:"status"` +} diff --git a/qubership-apihub-service/view/RefChangeView.go b/qubership-apihub-service/view/RefChangeView.go new file mode 100644 index 0000000..7f925eb --- /dev/null +++ b/qubership-apihub-service/view/RefChangeView.go @@ -0,0 +1,23 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +// todo maybe a better name? 
+type RefChangeView struct { + RefProjectId string `json:"projectId"` + RefProjectName string `json:"name"` + RefProjectVersion string `json:"version"` + Status string `json:"status"` +} diff --git a/qubership-apihub-service/view/RefPatch.go b/qubership-apihub-service/view/RefPatch.go new file mode 100644 index 0000000..7ea4039 --- /dev/null +++ b/qubership-apihub-service/view/RefPatch.go @@ -0,0 +1,27 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type RefPatch struct { + RefId string `json:"refId"` + Version string `json:"version"` + Status FileStatus `json:"status"` + Data RefPatchData `json:"data"` +} + +type RefPatchData struct { + RefId string `json:"refId"` + Version string `json:"version"` +} diff --git a/qubership-apihub-service/view/RestOperation.go b/qubership-apihub-service/view/RestOperation.go new file mode 100644 index 0000000..49c862c --- /dev/null +++ b/qubership-apihub-service/view/RestOperation.go @@ -0,0 +1,64 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type RestOperationChange struct { + Path string `json:"path"` + Method string `json:"method"` + Tags []string `json:"tags,omitempty"` +} + +type RestOperationMetadata struct { + Path string `json:"path"` + Method string `json:"method"` + Tags []string `json:"tags,omitempty"` +} + +type RestOperationSingleView struct { + SingleOperationView + RestOperationMetadata +} + +type RestOperationView struct { + OperationListView + RestOperationMetadata +} + +type DeprecatedRestOperationView struct { + DeprecatedOperationView + RestOperationMetadata +} + +type OperationSummary struct { + Endpoints int `json:"endpoints"` + Deprecated int `json:"deprecated"` + Created int `json:"created"` + Deleted int `json:"deleted"` +} + +type RestOperationComparisonChangelogView_deprecated struct { + OperationComparisonChangelogView_deprecated + RestOperationChange +} + +type RestOperationComparisonChangelogView struct { + OperationComparisonChangelogView + RestOperationChange +} + +type RestOperationComparisonChangesView struct { + OperationComparisonChangesView + RestOperationChange +} diff --git a/qubership-apihub-service/view/Role.go b/qubership-apihub-service/view/Role.go new file mode 100644 index 0000000..51337dc --- /dev/null +++ b/qubership-apihub-service/view/Role.go @@ -0,0 +1,47 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +const SysadmRole = "System administrator" + +const AdminRoleId = "admin" +const EditorRoleId = "editor" +const ViewerRoleId = "viewer" +const NoneRoleId = "none" + +type PackageRole struct { + RoleId string `json:"roleId"` + RoleName string `json:"role"` + ReadOnly bool `json:"readOnly,omitempty"` + Permissions []string `json:"permissions"` + Rank int `json:"rank"` +} + +type PackageRoles struct { + Roles []PackageRole `json:"roles"` +} + +type PackageRoleCreateReq struct { + Role string `json:"role" validate:"required"` + Permissions []string `json:"permissions" validate:"required"` +} + +type PackageRoleUpdateReq struct { + Permissions *[]string `json:"permissions"` +} + +type PackageRoleOrderReq struct { + Roles []string `json:"roles" validate:"required"` +} diff --git a/qubership-apihub-service/view/RolePermission.go b/qubership-apihub-service/view/RolePermission.go new file mode 100644 index 0000000..f1a0425 --- /dev/null +++ b/qubership-apihub-service/view/RolePermission.go @@ -0,0 +1,102 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// RolePermission is a fine-grained permission identifier that can be
// attached to a package role (see PackageRole.Permissions).
type RolePermission string

const (
	ReadPermission                   RolePermission = "read"
	CreateAndUpdatePackagePermission RolePermission = "create_and_update_package"
	DeletePackagePermission         RolePermission = "delete_package"
	ManageDraftVersionPermission    RolePermission = "manage_draft_version"
	ManageReleaseVersionPermission  RolePermission = "manage_release_version"
	ManageArchivedVersionPermission RolePermission = "manage_archived_version"
	UserAccessManagementPermission  RolePermission = "user_access_management"
	AccessTokenManagementPermission RolePermission = "access_token_management"
)

// GetAllRolePermissions returns every known permission, in display order.
// ParseRolePermission derives the set of valid ids from this list, so a
// new permission only needs to be added here and to the const block.
func GetAllRolePermissions() []RolePermission {
	return []RolePermission{
		ReadPermission,
		CreateAndUpdatePackagePermission,
		DeletePackagePermission,
		ManageDraftVersionPermission,
		ManageReleaseVersionPermission,
		ManageArchivedVersionPermission,
		UserAccessManagementPermission,
		AccessTokenManagementPermission,
	}
}

// Id returns the stable string identifier of the permission.
func (r RolePermission) Id() string {
	return string(r)
}

// Name returns a human-readable description of the permission, or the
// empty string for an unknown value.
func (r RolePermission) Name() string {
	switch r {
	case ReadPermission:
		return "read content of public packages"
	case CreateAndUpdatePackagePermission:
		return "create, update group/package"
	case DeletePackagePermission:
		return "delete group/package"
	case ManageDraftVersionPermission:
		return "manage version in draft status"
	case ManageReleaseVersionPermission:
		return "manage version in release status"
	case ManageArchivedVersionPermission:
		return "manage version in archived status"
	case UserAccessManagementPermission:
		return "user access management"
	case AccessTokenManagementPermission:
		return "access token management"
	default:
		return ""
	}
}

// ParseRolePermission maps a permission id to its RolePermission value.
// The valid ids are taken from GetAllRolePermissions, so this function
// never drifts out of sync with the declared permission set. An error is
// returned for any id that is not a known permission.
func ParseRolePermission(permissionId string) (RolePermission, error) {
	for _, permission := range GetAllRolePermissions() {
		if permission.Id() == permissionId {
			return permission, nil
		}
	}
	return "", fmt.Errorf("permission '%v' doesn't exist", permissionId)
}

// Permission is the API representation of a single permission.
type Permission struct {
	PermissionId string `json:"permission"`
	Name         string `json:"name"`
}

// Permissions is the list wrapper returned by the permission listing endpoint.
type Permissions struct {
	Permissions []Permission `json:"permissions"`
}
+ +package view + +import "time" + +const SearchLevelOperations = "operations" +const SearchLevelPackages = "packages" +const SearchLevelDocuments = "documents" + +const ScopeAll = "all" + +const RestScopeRequest = "request" +const RestScopeResponse = "response" +const RestScopeAnnotation = "annotation" +const RestScopeExamples = "examples" +const RestScopeProperties = "properties" + +const GraphqlScopeAnnotation = "annotation" +const GraphqlScopeArgument = "argument" +const GraphqlScopeProperty = "property" + +func ValidRestOperationScope(scope string) bool { + switch scope { + case ScopeAll, RestScopeRequest, RestScopeResponse, RestScopeAnnotation, RestScopeExamples, RestScopeProperties: + return true + } + return false +} + +func ValidGraphqlOperationScope(scope string) bool { + switch scope { + case ScopeAll, GraphqlScopeAnnotation, GraphqlScopeArgument, GraphqlScopeProperty: + return true + } + return false +} + +type PublicationDateInterval struct { + StartDate time.Time `json:"startDate"` + EndDate time.Time `json:"endDate"` +} + +type OperationSearchParams struct { + ApiType string `json:"apiType"` + Scopes []string `json:"scope"` + DetailedScopes []string `json:"detailedScope"` + Methods []string `json:"methods"` + OperationTypes []string `json:"operationTypes"` +} + +type SearchQueryReq struct { + SearchString string `json:"searchString" validate:"required"` + PackageIds []string `json:"packageIds"` + Versions []string `json:"versions"` + Statuses []string `json:"statuses"` + PublicationDateInterval PublicationDateInterval `json:"creationDateInterval"` + OperationSearchParams *OperationSearchParams `json:"operationParams"` + Limit int `json:"-"` + Page int `json:"-"` +} + +// deprecated +type SearchResult_deprecated struct { + Operations *[]OperationSearchResult_deprecated `json:"operations,omitempty"` + Packages *[]PackageSearchResult `json:"packages,omitempty"` + Documents *[]DocumentSearchResult `json:"documents,omitempty"` +} + +type SearchResult 
struct { + Operations *[]interface{} `json:"operations,omitempty"` + Packages *[]PackageSearchResult `json:"packages,omitempty"` + Documents *[]DocumentSearchResult `json:"documents,omitempty"` +} + +type OperationSearchWeightsDebug struct { + ScopeWeight float64 `json:"scopeWeight"` + ScopeTf float64 `json:"scopeTf"` + TitleTf float64 `json:"titleTf"` + VersionStatusTf float64 `json:"versionStatusTf"` + OperationOpenCountWeight float64 `json:"operationOpenCountWeight"` + OperationOpenCount float64 `json:"operationOpenCount"` +} + +// deprecated +type OperationSearchResult_deprecated struct { + PackageId string `json:"packageId"` + PackageName string `json:"name"` + ParentPackages []string `json:"parentPackages"` + Version string `json:"version"` + VersionStatus string `json:"status"` + OperationId string `json:"operationId"` + Title string `json:"title"` + Deprecated bool `json:"deprecated,omitempty"` + ApiType string `json:"apiType"` + Metadata interface{} `json:"metadata"` + + //debug + Debug OperationSearchWeightsDebug `json:"debug,omitempty"` +} + +type CommonOperationSearchResult struct { + PackageId string `json:"packageId"` + PackageName string `json:"name"` + ParentPackages []string `json:"parentPackages"` + VersionStatus string `json:"status"` + Version string `json:"version"` + Title string `json:"title"` + + //debug + Debug OperationSearchWeightsDebug `json:"debug,omitempty"` +} + +type RestOperationSearchResult struct { + RestOperationView + CommonOperationSearchResult +} + +type GraphQLOperationSearchResult struct { + GraphQLOperationView + CommonOperationSearchResult +} + +type PackageSearchWeightsDebug struct { + PackageIdTf float64 `json:"packageIdTf"` + PackageNameTf float64 `json:"packageNameTf"` + PackageDescriptionTf float64 `json:"packageDescriptionTf"` + PackageServiceNameTf float64 `json:"packageServiceNameTf"` + VersionTf float64 `json:"versionTf"` + VersionLabelsTf float64 `json:"versionLabelsTf"` + DefaultVersionTf float64 
`json:"defaultVersionTf"` + VersionStatusTf float64 `json:"versionStatusTf"` + VersionOpenCountWeight float64 `json:"versionOpenCountWeight"` + VersionOpenCount float64 `json:"versionOpenCount"` +} + +type PackageSearchResult struct { + PackageId string `json:"packageId"` + PackageName string `json:"name"` + Description string `json:"description,omitempty"` + ServiceName string `json:"serviceName,omitempty"` + ParentPackages []string `json:"parentPackages"` + Version string `json:"version"` + VersionStatus string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + Labels []string `json:"labels,omitempty"` + LatestRevision bool `json:"latestRevision,omitempty"` + + //debug + Debug PackageSearchWeightsDebug `json:"debug,omitempty"` +} + +type DocumentSearchWeightsDebug struct { + TitleTf float64 `json:"titleTf"` + LabelsTf float64 `json:"labelsTf"` + ContentTf float64 `json:"contentTf"` + VersionStatusTf float64 `json:"versionStatusTf"` + DocumentOpenCountWeight float64 `json:"documentOpenCountWeight"` + DocumentOpenCount float64 `json:"documentOpenCount"` +} + +type DocumentSearchResult struct { + PackageId string `json:"packageId"` + PackageName string `json:"name"` + ParentPackages []string `json:"parentPackages"` + Version string `json:"version"` + VersionStatus string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + Slug string `json:"slug"` + Type string `json:"type"` + Title string `json:"title"` + Labels []string `json:"labels,omitempty"` + Content string `json:"content,omitempty"` + + //debug + Debug DocumentSearchWeightsDebug `json:"debug,omitempty"` +} diff --git a/qubership-apihub-service/view/Services.go b/qubership-apihub-service/view/Services.go new file mode 100644 index 0000000..9cab1fd --- /dev/null +++ b/qubership-apihub-service/view/Services.go @@ -0,0 +1,24 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type ServiceNameItem struct { + Id string `json:"id"` + Name string `json:"name"` +} + +type ServiceNamesResponse struct { + ServiceNames []ServiceNameItem `json:"serviceNames"` +} diff --git a/qubership-apihub-service/view/ShortcutType.go b/qubership-apihub-service/view/ShortcutType.go new file mode 100644 index 0000000..a1219ac --- /dev/null +++ b/qubership-apihub-service/view/ShortcutType.go @@ -0,0 +1,76 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
// ShortcutType identifies the format of a published document or spec.
type ShortcutType string

// todo maybe add plain text type
const (
	OpenAPI31     ShortcutType = "openapi-3-1"
	OpenAPI30     ShortcutType = "openapi-3-0"
	OpenAPI20     ShortcutType = "openapi-2-0"
	AsyncAPI      ShortcutType = "asyncapi-2"
	JsonSchema    ShortcutType = "json-schema"
	MD            ShortcutType = "markdown"
	GraphQLSchema ShortcutType = "graphql-schema"
	GraphAPI      ShortcutType = "graphapi"
	Introspection ShortcutType = "introspection"
	Unknown       ShortcutType = "unknown"
)

// String returns the raw string value of the type.
func (s ShortcutType) String() string {
	return string(s)
}

// knownShortcutTypes maps every recognized identifier to its typed value.
var knownShortcutTypes = map[string]ShortcutType{
	"openapi-3-0":    OpenAPI30,
	"openapi-3-1":    OpenAPI31,
	"openapi-2-0":    OpenAPI20,
	"asyncapi-2":     AsyncAPI,
	"markdown":       MD,
	"unknown":        Unknown,
	"json-schema":    JsonSchema,
	"graphql-schema": GraphQLSchema,
	"graphapi":       GraphAPI,
	"introspection":  Introspection,
}

// ParseTypeFromString converts a raw type identifier into a ShortcutType.
// Any unrecognized identifier yields Unknown.
func ParseTypeFromString(s string) ShortcutType {
	if t, ok := knownShortcutTypes[s]; ok {
		return t
	}
	return Unknown
}

// ComparableTypes reports whether documents of the two types can be
// compared with each other: identical types always can, and the
// OpenAPI 3.0 / 3.1 pair is mutually comparable in either order.
func ComparableTypes(type1 ShortcutType, type2 ShortcutType) bool {
	if type1 == type2 {
		return true
	}
	isOpenAPI3 := func(t ShortcutType) bool {
		return t == OpenAPI30 || t == OpenAPI31
	}
	return isOpenAPI3(type1) && isOpenAPI3(type2)
}
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package view + +type Status struct { + Status string `json:"status"` +} + +type Statuses struct { + Statuses []string `json:"statuses"` +} diff --git a/qubership-apihub-service/view/SystemInfo.go b/qubership-apihub-service/view/SystemInfo.go new file mode 100644 index 0000000..2a1a76b --- /dev/null +++ b/qubership-apihub-service/view/SystemInfo.go @@ -0,0 +1,29 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type SystemInfo struct { + BackendVersion string `json:"backendVersion"` + FrontendVersion string `json:"frontendVersion"` + ProductionMode bool `json:"productionMode"` + Notification string `json:"notification,omitempty"` + ExternalLinks []string `json:"externalLinks"` +} + +type SystemConfigurationInfo struct { + SSOIntegrationEnabled bool `json:"ssoIntegrationEnabled"` + AutoRedirect bool `json:"autoRedirect"` + DefaultWorkspaceId string `json:"defaultWorkspaceId"` +} diff --git a/qubership-apihub-service/view/Transition.go b/qubership-apihub-service/view/Transition.go new file mode 100644 index 0000000..b5aedb5 --- /dev/null +++ b/qubership-apihub-service/view/Transition.go @@ -0,0 +1,44 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +import "time" + +type TransitionRequest struct { + From string `json:"from" validate:"required"` + To string `json:"to" validate:"required"` + + OverwriteHistory bool `json:"overwriteHistory"` +} + +type TransitionStatus struct { + Id string `json:"id"` + TrType string `json:"trType"` + FromId string `json:"fromId"` + ToId string `json:"toId"` + Status string `json:"status"` + Details string `json:"details,omitempty"` + StartedBy string `json:"startedBy"` + StartedAt time.Time `json:"startedAt"` + FinishedAt time.Time `json:"finishedAt"` + ProgressPercent int `json:"progressPercent"` + AffectedObjects int `json:"affectedObjects"` + CompletedSerialNumber *int `json:"completedSerialNumber"` +} + +type PackageTransition struct { + OldPackageId string `json:"oldPackageId"` + NewPackageId string `json:"newPackageId"` +} diff --git a/qubership-apihub-service/view/User.go b/qubership-apihub-service/view/User.go new file mode 100644 index 0000000..720b0b0 --- /dev/null +++ b/qubership-apihub-service/view/User.go @@ -0,0 +1,62 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type User struct { + Id string `json:"id"` + Email string `json:"email"` + Name string `json:"name"` + AvatarUrl string `json:"avatarUrl"` +} + +type UserAvatar struct { + Id string + Avatar []byte + Checksum [32]byte +} + +type Users struct { + Users []User `json:"users"` +} + +type LdapUsers struct { + Users []LdapUser +} + +type LdapUser struct { + Id string + Email string + Name string + Avatar []byte +} + +type UsersListReq struct { + Filter string `json:"filter"` + Limit int `json:"limit"` + Page int `json:"page"` +} + +type InternalUser struct { + Id string `json:"-"` + Email string `json:"email" validate:"required"` + Name string `json:"name"` + Password string `json:"password" validate:"required"` + PrivateWorkspaceId string `json:"privateWorkspaceId"` +} + +type LdapSearchFilterReq struct { + FilterToValue map[string]string + Limit int +} diff --git a/qubership-apihub-service/view/Validation.go b/qubership-apihub-service/view/Validation.go new file mode 100644 index 0000000..b380c26 --- /dev/null +++ b/qubership-apihub-service/view/Validation.go @@ -0,0 +1,102 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package view + +type Validation struct { + Summary interface{} `json:"summary,omitempty"` + //Data []interface{} `json:"data,omitempty"` +} + +type ValidationsMap map[string]Validation + +type VersionValidationChanges struct { + PreviousVersion string `json:"previousVersion,omitempty"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` + Changes []VersionChangelogData `json:"changes"` + Bwc []VersionBwcData `json:"bwcMessages"` +} + +type VersionValidationProblems struct { + Spectral []VersionSpectralData `json:"messages"` +} + +// changelog.json +type VersionChangelog struct { + Summary VersionChangelogSummary `json:"summary,omitempty"` + Data []VersionChangelogData `json:"data,omitempty"` +} + +type VersionChangelogSummary struct { + Breaking int `json:"breaking"` + NonBreaking int `json:"non-breaking"` + Unclassified int `json:"unclassified"` + SemiBreaking int `json:"semi-breaking"` + Annotation int `json:"annotation"` + Deprecate int `json:"deprecate"` +} + +type VersionChangelogData struct { + FileId string `json:"fileId,omitempty"` + Slug string `json:"slug,omitempty"` + PreviousFileId string `json:"previousFileId,omitempty"` + PreviousSlug string `json:"previousSlug,omitempty"` + Openapi *OpenapiOperation `json:"openapi,omitempty"` + Asyncapi *AsyncapiOperation `json:"asyncapi,omitempty"` + JsonPath []string `json:"jsonPath,omitempty" validate:"required"` + Action string `json:"action,omitempty" validate:"required"` + Severity string `json:"severity,omitempty" validate:"required"` +} + +// spectral.json +type VersionSpectral struct { + Summary VersionSpectralSummary `json:"summary,omitempty"` + Data []VersionSpectralData `json:"data,omitempty"` +} + +type VersionSpectralSummary struct { + Errors int `json:"error"` + Warnings int `json:"warnings"` +} + +type VersionSpectralData struct { + FileId string `json:"fileId,omitempty"` + Slug string `json:"slug,omitempty"` + JsonPath []string `json:"jsonPath,omitempty"` + 
ExternalFilePath string `json:"externalFilePath,omitempty"` + Message string `json:"message" validate:"required"` + Severity int `json:"severity" validate:"required"` +} + +// bwc.json +type VersionBwc struct { + Summary VersionBwcSummary `json:"summary,omitempty"` + Data []VersionBwcData `json:"data,omitempty"` +} + +type VersionBwcSummary struct { + Errors int `json:"error"` + Warnings int `json:"warnings"` +} + +type VersionBwcData struct { + FileId string `json:"fileId,omitempty"` + PreviousFileId string `json:"previousFileId,omitempty"` + Slug string `json:"slug,omitempty"` + PreviousSlug string `json:"previousSlug,omitempty"` + JsonPath []string `json:"jsonPath,omitempty"` + ExternalFilePath string `json:"externalFilePath,omitempty"` + Message string `json:"message" validate:"required"` + Severity int `json:"severity" validate:"required"` +} diff --git a/qubership-apihub-service/view/ValidationReport.go b/qubership-apihub-service/view/ValidationReport.go new file mode 100644 index 0000000..1ac1460 --- /dev/null +++ b/qubership-apihub-service/view/ValidationReport.go @@ -0,0 +1,120 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
// ValidationStatus is the aggregate validation verdict for a group,
// package, or file.
type ValidationStatus string

// ContentValidationType is the severity class of a single validation message.
type ContentValidationType string

const (
	RvStatusYes        ValidationStatus = "YES"
	RvStatusSemi       ValidationStatus = "SEMI"
	RvStatusNo         ValidationStatus = "NO"
	RvStatusInProgress ValidationStatus = "IN_PROGRESS"
)

const (
	CvTypeError          ContentValidationType = "ERROR"
	CvTypeWarning        ContentValidationType = "WARNING"
	CvTypeInformation    ContentValidationType = "INFORMATION"
	CvTypeRecommendation ContentValidationType = "RECOMMENDATION"
)

// ValidationReport is the top-level result of a validation run.
type ValidationReport struct {
	ValidationId string                    `json:"validationId"`
	GroupName    string                    `json:"groupName"`
	Status       ValidationStatus          `json:"status"`
	Error        string                    `json:"error,omitempty"`
	Packages     []PackageValidationReport `json:"packages,omitempty"`
}

// PackageValidationReport is the per-package slice of a validation run.
type PackageValidationReport struct {
	PackageId   string                 `json:"packageId"`
	PackageName string                 `json:"packageName"`
	Status      ValidationStatus       `json:"status"`
	Files       []FileValidationReport `json:"files,omitempty"`
}

// FileValidationReport is the per-file slice of a validation run.
type FileValidationReport struct {
	Slug         string                  `json:"slug"`
	Status       ValidationStatus        `json:"status"`
	FileMessages []FileValidationMessage `json:"messages,omitempty"`
}

// FileValidationMessage is one validation finding inside a file.
type FileValidationMessage struct {
	Type ContentValidationType `json:"type"`
	Path string                `json:"path"`
	Text string                `json:"text"`
}

// FileDataInfo carries raw file content together with its checksum and format.
type FileDataInfo struct {
	Checksum string
	Data     []byte
	Format   string
}

// String renders the status for display. Note that the display names do
// not always equal the underlying values (YES -> "OK",
// IN_PROGRESS -> "IN-PROGRESS"); unknown values render as "NONE".
func (f ValidationStatus) String() string {
	displayNames := map[ValidationStatus]string{
		RvStatusYes:        "OK",
		RvStatusNo:         "NO",
		RvStatusSemi:       "SEMI",
		RvStatusInProgress: "IN-PROGRESS",
	}
	if name, ok := displayNames[f]; ok {
		return name
	}
	return "NONE"
}

// String renders the severity class for display; any unrecognized value
// falls back to "RECOMMENDATION", matching the original behavior.
func (f ContentValidationType) String() string {
	switch f {
	case CvTypeError, CvTypeWarning, CvTypeInformation:
		// For these three the display name equals the underlying value.
		return string(f)
	default:
		return "RECOMMENDATION"
	}
}

// ParseCvTypeInt converts a numeric severity (0..2) into its typed
// severity class; any other number maps to CvTypeRecommendation.
func ParseCvTypeInt(severity int) ContentValidationType {
	bySeverity := []ContentValidationType{CvTypeError, CvTypeWarning, CvTypeInformation}
	if severity >= 0 && severity < len(bySeverity) {
		return bySeverity[severity]
	}
	return CvTypeRecommendation
}

// ToInt converts a severity class back to its numeric rank (0..3);
// unknown values map to 4.
func (f ContentValidationType) ToInt() int {
	ranks := map[ContentValidationType]int{
		CvTypeError:          0,
		CvTypeWarning:        1,
		CvTypeInformation:    2,
		CvTypeRecommendation: 3,
	}
	if rank, ok := ranks[f]; ok {
		return rank
	}
	return 4
}
+ +package view + +import ( + "time" +) + +type VersionContent_deprecated struct { + PublishedAt time.Time `json:"createdAt"` + PublishedBy string `json:"createdBy"` + PreviousVersion string `json:"previousVersion,omitempty"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` + VersionLabels []string `json:"versionLabels,omitempty"` + Status string `json:"status"` + OperationTypes []VersionOperationType `json:"operationTypes,omitempty"` + PackageId string `json:"packageId"` + Version string `json:"version"` + NotLatestRevision bool `json:"notLatestRevision,omitempty"` + RevisionsCount int `json:"revisionsCount,omitempty"` + OperationGroups []VersionOperationGroup `json:"operationGroups,omitempty"` +} +type VersionContent struct { + PublishedAt time.Time `json:"createdAt"` + PublishedBy map[string]interface{} `json:"createdBy"` + PreviousVersion string `json:"previousVersion,omitempty"` + PreviousVersionPackageId string `json:"previousVersionPackageId,omitempty"` + VersionLabels []string `json:"versionLabels,omitempty"` + Status string `json:"status"` + OperationTypes []VersionOperationType `json:"operationTypes,omitempty"` + PackageId string `json:"packageId"` + Version string `json:"version"` + NotLatestRevision bool `json:"notLatestRevision,omitempty"` + RevisionsCount int `json:"revisionsCount,omitempty"` + OperationGroups []VersionOperationGroup `json:"operationGroups,omitempty"` +} + +type VersionOperationType struct { + ApiType string `json:"apiType"` + OperationsCount *int `json:"operationsCount,omitempty"` + DeprecatedCount *int `json:"deprecatedCount,omitempty"` + NoBwcOperationsCount *int `json:"noBwcOperationsCount,omitempty"` + ChangesSummary *ChangeSummary `json:"changesSummary,omitempty"` + NumberOfImpactedOperations *ChangeSummary `json:"numberOfImpactedOperations,omitempty"` + InternalAudienceOperationsCount *int `json:"internalAudienceOperationsCount,omitempty"` + UnknownAudienceOperationsCount *int 
`json:"unknownAudienceOperationsCount,omitempty"` + ApiAudienceTransitions []ApiAudienceTransition `json:"apiAudienceTransitions,omitempty"` + Operations map[string]string `json:"operations,omitempty"` +} + +type VersionOperationGroup struct { + GroupName string `json:"groupName"` + ApiType string `json:"apiType"` + Description string `json:"description,omitempty"` + IsPrefixGroup bool `json:"isPrefixGroup"` + OperationsCount int `json:"operationsCount"` + GhostOperationsCount int `json:"ghostOperationsCount,omitempty"` + ExportTemplateFilename string `json:"exportTemplateFileName,omitempty"` +} + +type VersionDocuments struct { + Documents []PublishedDocumentRefView `json:"documents"` + Packages map[string]PackageVersionRef `json:"packages,omitempty"` +} + +// deprecated +type VersionReferences struct { + References []VersionReference `json:"references"` +} + +// deprecated +type VersionReference struct { + RefId string `json:"refId"` + Kind string `json:"kind"` + Name string `json:"name"` + Version string `json:"version"` + Revision int `json:"revision"` + Status string `json:"status"` + DeletedAt *time.Time `json:"deletedAt,omitempty"` + DeletedBy string `json:"deletedBy,omitempty"` + Parents []ParentPackageInfo `json:"parents"` +} + +type VersionReferencesV3 struct { + References []VersionReferenceV3 `json:"references"` + Packages map[string]PackageVersionRef `json:"packages,omitempty"` +} + +type VersionReferenceV3 struct { + PackageRef string `json:"packageRef"` + ParentPackageRef string `json:"parentPackageRef,omitempty"` + Excluded bool `json:"excluded,omitempty"` +} + +type File struct { + FieldId string `json:"fieldId"` + Slug string `json:"slug"` + Type string `json:"type"` + Format string `json:"format"` + Title string `json:"title"` + Labels []string `json:"labels"` +} + +type PublishedVersionListView_deprecated_v2 struct { + Version string `json:"version"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` + CreatedBy string 
`json:"createdBy"`
	VersionLabels            []string `json:"versionLabels"`
	PreviousVersion          string   `json:"previousVersion"`
	PreviousVersionPackageId string   `json:"previousVersionPackageId,omitempty"`
	NotLatestRevision        bool     `json:"notLatestRevision,omitempty"`
}

// PublishedVersionListView is a single entry of a package's published-version list.
// CreatedBy is a free-form object here (vs. the typed User in the deprecated v2 view).
type PublishedVersionListView struct {
	Version                  string                 `json:"version"`
	Status                   string                 `json:"status"`
	CreatedAt                time.Time              `json:"createdAt"`
	CreatedBy                map[string]interface{} `json:"createdBy"`
	VersionLabels            []string               `json:"versionLabels"`
	PreviousVersion          string                 `json:"previousVersion"`
	PreviousVersionPackageId string                 `json:"previousVersionPackageId,omitempty"`
	NotLatestRevision        bool                   `json:"notLatestRevision,omitempty"`
}

// PublishedVersionsView_deprecated_v2 wraps the deprecated v2 list items.
type PublishedVersionsView_deprecated_v2 struct {
	Versions []PublishedVersionListView_deprecated_v2 `json:"versions"`
}

// PublishedVersionsView wraps the current list items.
type PublishedVersionsView struct {
	Versions []PublishedVersionListView `json:"versions"`
}

// SharedUrlResult is the response of a "share file" operation.
type SharedUrlResult struct {
	SharedFileId string `json:"sharedFileId"`
}

// SharedFilesReq identifies a published document to share by package/version/slug.
type SharedFilesReq struct {
	PackageId string `json:"packageId"`
	Version   string `json:"version"`
	Slug      string `json:"slug"`
}

// VersionPatchRequest carries partial updates for a version; nil pointer fields
// mean "leave unchanged".
type VersionPatchRequest struct {
	Status        *string   `json:"status"`
	VersionLabels *[]string `json:"versionLabels"`
}

// VersionListReq is the internal (non-JSON) parameter set for listing versions.
type VersionListReq struct {
	PackageId      string
	Status         string
	Limit          int
	Page           int
	TextFilter     string
	Label          string
	CheckRevisions bool
	SortBy         string // one of the VersionSortBy* constants
	SortOrder      string // one of the VersionSortOrder* constants
}

// VersionReferencesReq is the internal parameter set for listing version references.
type VersionReferencesReq struct {
	Limit              int
	Page               int
	TextFilter         string
	Kind               string
	ShowAllDescendants bool
}

// CompareVersionsReq requests a diff between two published versions.
type CompareVersionsReq struct {
	PackageId                string `json:"packageId" validate:"required"`
	Version                  string `json:"version" validate:"required"`
	PreviousVersion          string `json:"previousVersion" validate:"required"`
	PreviousVersionPackageId string `json:"previousVersionPackageId" validate:"required"`
}

// PackageVersionRef describes a package version referenced by another version
// (e.g. a dashboard member). ServiceName is internal-only and never serialized.
type PackageVersionRef struct {
	RefPackageId      string     `json:"refId"`
	Kind              string     `json:"kind"`
	RefPackageName    string     `json:"name"`
	RefPackageVersion string     `json:"version"`
	Status            string     `json:"status"`
	DeletedAt         *time.Time `json:"deletedAt,omitempty"`
	DeletedBy         string     `json:"deletedBy,omitempty"`
	ParentNames       []string   `json:"parentPackages,omitempty"`
	ServiceName       string     `json:"-"`
	NotLatestRevision bool       `json:"notLatestRevision,omitempty"`
}

// PackageVersionRevisions_deprecated wraps the deprecated revision items.
type PackageVersionRevisions_deprecated struct {
	Revisions []PackageVersionRevision_deprecated `json:"revisions"`
}

// PackageVersionRevisions wraps the current revision items.
type PackageVersionRevisions struct {
	Revisions []PackageVersionRevision `json:"revisions"`
}

// PackageVersionRevision_deprecated is a revision entry with a typed CreatedBy user.
type PackageVersionRevision_deprecated struct {
	Version           string              `json:"version"`
	Revision          int                 `json:"revision"`
	Status            string              `json:"status"`
	CreatedBy         User                `json:"createdBy"`
	CreatedAt         time.Time           `json:"createdAt"`
	RevisionLabels    []string            `json:"revisionLabels"`
	PublishMeta       BuildConfigMetadata `json:"publishMeta"`
	NotLatestRevision bool                `json:"notLatestRevision,omitempty"`
}

// PackageVersionRevision is a revision entry; CreatedBy is a free-form object.
type PackageVersionRevision struct {
	Version           string                 `json:"version"`
	Revision          int                    `json:"revision"`
	Status            string                 `json:"status"`
	CreatedBy         map[string]interface{} `json:"createdBy"`
	CreatedAt         time.Time              `json:"createdAt"`
	RevisionLabels    []string               `json:"revisionLabels"`
	PublishMeta       BuildConfigMetadata    `json:"publishMeta"`
	NotLatestRevision bool                   `json:"notLatestRevision,omitempty"`
}

// DeleteVersionsRecursivelyReq requests deletion of all versions older than the given date.
type DeleteVersionsRecursivelyReq struct {
	OlderThanDate time.Time `json:"olderThanDate"`
}

// CopyVersionReq describes where and as what to copy an existing version.
type CopyVersionReq struct {
	TargetPackageId                string   `json:"targetPackageId" validate:"required"`
	TargetVersion                  string   `json:"targetVersion" validate:"required"`
	TargetPreviousVersion          string   `json:"targetPreviousVersion"`
	TargetPreviousVersionPackageId string   `json:"targetPreviousVersionPackageId"`
	TargetStatus                   string   `json:"targetStatus" validate:"required"`
	TargetVersionLabels            []string `json:"targetVersionLabels"`
}

// CopyVersionResp returns the async publish operation id for a copy request.
type CopyVersionResp struct {
	PublishId string `json:"publishId"`
}

// Sort order values accepted in VersionListReq.SortOrder.
const VersionSortOrderAsc = "asc"
const VersionSortOrderDesc = "desc"

// Sort key values accepted in VersionListReq.SortBy.
const VersionSortByVersion = "version"
const VersionSortByCreatedAt = "createdAt"

// PublishFromCSVReq requests publication of a dashboard version built from CSV data.
type PublishFromCSVReq struct {
	PackageId                string   `json:"packageId" validate:"required"`
	Version                  string   `json:"version" validate:"required"`
	PreviousVersion          string   `json:"previousVersion"`
	PreviousVersionPackageId string   `json:"previousVersionPackageId"`
	Status                   string   `json:"status" validate:"required"`
	VersionLabels            []string `json:"versionLabels"`
	CSVData                  []byte   `json:"csvData"`
	ServicesWorkspaceId      string   `json:"servicesWorkspaceId" validate:"required"` // workspace used to match packages by their service names
}

// PublishFromCSVResp returns the async publish operation id for a CSV publication.
type PublishFromCSVResp struct {
	PublishId string `json:"publishId"`
}

// CSVDashboardPublishStatusResponse reports the state of an async CSV dashboard publication.
type CSVDashboardPublishStatusResponse struct {
	Status  string `json:"status"`
	Message string `json:"message"`
}
diff --git a/qubership-apihub-service/view/VersionChange.go b/qubership-apihub-service/view/VersionChange.go new file mode 100644 index 0000000..fc07c40 --- /dev/null +++ b/qubership-apihub-service/view/VersionChange.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package view

// VersionChange aggregates the file- and reference-level changes between two versions.
type VersionChange struct {
	Files []ContentChange `json:"files"`
	Refs  []RefChange     `json:"refs"`
}
diff --git a/qubership-apihub-service/view/VersionStatus.go b/qubership-apihub-service/view/VersionStatus.go new file mode 100644 index 0000000..bfd7851 --- /dev/null +++ b/qubership-apihub-service/view/VersionStatus.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package view

import "fmt"

// VersionStatus is the lifecycle state of a published package version.
type VersionStatus string

const (
	Draft    VersionStatus = "draft"
	Release  VersionStatus = "release"
	Archived VersionStatus = "archived"
)

// String returns the wire representation of a known status, or the empty
// string for any unrecognized value (deliberately NOT string(v)).
func (v VersionStatus) String() string {
	switch v {
	case Draft:
		return "draft"
	case Release:
		return "release"
	case Archived:
		return "archived"
	default:
		return ""
	}
}

// ParseVersionStatus converts a raw string into a VersionStatus.
// Matching is exact and case-sensitive; any other input yields an error.
func ParseVersionStatus(s string) (VersionStatus, error) {
	switch s {
	case "draft":
		return Draft, nil
	case "release":
		return Release, nil
	case "archived":
		return Archived, nil
	}
	return "", fmt.Errorf("unknown version status: %v", s)
}
diff --git a/qubership-apihub-service/websocket/Handlers.go b/qubership-apihub-service/websocket/Handlers.go new file mode 100644 index 0000000..c68d121 --- /dev/null +++ b/qubership-apihub-service/websocket/Handlers.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use
this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

// WsMessageHandler processes a raw inbound websocket message received from
// client wsId within the given edit session.
type WsMessageHandler interface {
	HandleMessage(data []byte, wsId string, session *WsEditSession)
}

// SessionClosedHandler receives lifecycle callbacks from an edit session:
// when the whole session closes and when a single user disconnects.
type SessionClosedHandler interface {
	HandleSessionClosed(editSessionId string)
	HandleUserDisconnected(editSessionId string, wsId string)
}
diff --git a/qubership-apihub-service/websocket/RemoteEvents.go b/qubership-apihub-service/websocket/RemoteEvents.go new file mode 100644 index 0000000..416259a --- /dev/null +++ b/qubership-apihub-service/websocket/RemoteEvents.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

// BranchEvent is a cross-instance notification about a branch edit session;
// it is serialized both as JSON and msgpack.
type BranchEvent struct {
	ProjectId  string      `json:"projectId" msgpack:"projectId"`
	BranchName string      `json:"branchName" msgpack:"branchName"`
	WsId       string      `json:"wsId" msgpack:"wsId"`
	Action     interface{} `json:"action" msgpack:"action"`
}

// BranchEventFromMap rebuilds a BranchEvent from a generic decoded map.
// NOTE(review): the type assertions are unchecked — a missing or non-string
// key panics; callers are presumably trusted producers of these maps. Verify.
func BranchEventFromMap(m map[string]interface{}) BranchEvent {
	return BranchEvent{
		ProjectId:  m["projectId"].(string),
		BranchName: m["branchName"].(string),
		WsId:       m["wsId"].(string),
		Action:     m["action"],
	}
}

// BranchEventToMap flattens a BranchEvent into a generic map (inverse of
// BranchEventFromMap).
func BranchEventToMap(e BranchEvent) map[string]interface{} {
	result := map[string]interface{}{}
	result["projectId"] = e.ProjectId
	result["branchName"] = e.BranchName
	result["wsId"] = e.WsId
	result["action"] = e.Action
	return result
}

// FileEvent is a cross-instance notification about a single file in a branch.
type FileEvent struct {
	ProjectId  string `json:"projectId" msgpack:"projectId"`
	BranchName string `json:"branchName" msgpack:"branchName"`
	FileId     string `json:"fileId" msgpack:"fileId"`
	Action     string `json:"action" msgpack:"action"`
	Content    string `json:"content" msgpack:"content"`
}

// FileEventFromMap rebuilds a FileEvent from a generic decoded map.
// NOTE(review): unchecked type assertions, same caveat as BranchEventFromMap.
func FileEventFromMap(m map[string]interface{}) FileEvent {
	return FileEvent{
		ProjectId:  m["projectId"].(string),
		BranchName: m["branchName"].(string),
		FileId:     m["fileId"].(string),
		Action:     m["action"].(string),
		Content:    m["content"].(string),
	}
}

// FileEventToMap flattens a FileEvent into a generic map (inverse of
// FileEventFromMap).
func FileEventToMap(e FileEvent) map[string]interface{} {
	result := map[string]interface{}{}
	result["projectId"] = e.ProjectId
	result["branchName"] = e.BranchName
	result["fileId"] = e.FileId
	result["action"] = e.Action
	result["content"] = e.Content
	return result
}
diff --git a/qubership-apihub-service/websocket/WsBranchActions.go b/qubership-apihub-service/websocket/WsBranchActions.go new file mode 100644 index 0000000..214c978 --- /dev/null +++ b/qubership-apihub-service/websocket/WsBranchActions.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not
use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

import (
	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// Message type discriminators sent in the "type" field of branch-session patches.
const (
	BranchConfigSnapshotType    = "branch:config:snapshot"
	BranchConfigUpdatedType     = "branch:config:updated"
	BranchFilesUpdatedType      = "branch:files:updated"
	BranchFilesResetType        = "branch:files:reset"
	BranchFilesDataModifiedType = "branch:files:data:modified"
	BranchRefsUpdatedType       = "branch:refs:updated"
	BranchSavedType             = "branch:saved"
	BranchResetType             = "branch:reset"
	BranchEditorAddedType       = "branch:editors:added"
	BranchEditorRemovedType     = "branch:editors:removed"
)

// BranchConfigSnapshot carries the full branch config as an opaque payload.
type BranchConfigSnapshot struct {
	Type string      `json:"type" msgpack:"type"`
	Data interface{} `json:"data" msgpack:"data"`
}

// BranchConfigUpdatedPatch notifies that the branch config changed.
type BranchConfigUpdatedPatch struct {
	Type string                       `json:"type" msgpack:"type"`
	Data BranchConfigUpdatedPatchData `json:"data" msgpack:"data"`
}

type BranchConfigUpdatedPatchData struct {
	ChangeType view.ChangeType `json:"changeType" msgpack:"changeType"`
}

// BranchFilesUpdatedPatch notifies about a file operation performed by a user.
type BranchFilesUpdatedPatch struct {
	Type      string                       `json:"type" msgpack:"type"`
	UserId    string                       `json:"userId" msgpack:"userId"`
	Operation string                       `json:"operation" msgpack:"operation"`
	FileId    string                       `json:"fileId,omitempty" msgpack:"fileId,omitempty"`
	Data      *BranchFilesUpdatedPatchData `json:"data,omitempty" msgpack:"data,omitempty"`
}

// BranchResetPatch notifies that a user reset the whole branch.
type BranchResetPatch struct {
	Type   string `json:"type" msgpack:"type"`
	UserId string `json:"userId" msgpack:"userId"`
}

// BranchFilesResetPatch notifies that a user reset a single file.
type BranchFilesResetPatch struct {
	Type   string `json:"type" msgpack:"type"`
	UserId string `json:"userId" msgpack:"userId"`
	FileId string `json:"fileId" msgpack:"fileId"`
}

// BranchFilesUpdatedPatchData details a file update; all fields are optional
// and only the ones relevant to the operation are populated.
type BranchFilesUpdatedPatchData struct {
	FileId           string          `json:"fileId,omitempty" msgpack:"fileId,omitempty"`
	Publish          *bool           `json:"publish,omitempty" msgpack:"publish,omitempty"`
	Labels           *[]string       `json:"labels,omitempty" msgpack:"labels,omitempty"`
	Status           view.FileStatus `json:"status,omitempty" msgpack:"status,omitempty"`
	MovedFrom        *string         `json:"movedFrom,omitempty" msgpack:"movedFrom,omitempty"`
	ChangeType       view.ChangeType `json:"changeType,omitempty" msgpack:"changeType,omitempty"`
	BlobId           *string         `json:"blobId,omitempty" msgpack:"blobId,omitempty"`
	ConflictedBlobId *string         `json:"conflictedBlobId,omitempty" msgpack:"conflictedBlobId,omitempty"`
	ConflictedFileId *string         `json:"conflictedFileId,omitempty" msgpack:"conflictedFileId,omitempty"`
}

// BranchFilesDataModified notifies that a file's content was modified.
type BranchFilesDataModified struct {
	Type   string `json:"type" msgpack:"type"`
	UserId string `json:"userId" msgpack:"userId"`
	FileId string `json:"fileId" msgpack:"fileId"`
}

// BranchRefsUpdatedPatch notifies about a reference operation performed by a user.
type BranchRefsUpdatedPatch struct {
	Type      string                      `json:"type" msgpack:"type"`
	UserId    string                      `json:"userId" msgpack:"userId"`
	Operation string                      `json:"operation" msgpack:"operation"`
	RefId     string                      `json:"refId,omitempty" msgpack:"refId,omitempty"`
	Version   string                      `json:"version,omitempty" msgpack:"version,omitempty"`
	Data      *BranchRefsUpdatedPatchData `json:"data,omitempty" msgpack:"data,omitempty"`
}

type BranchRefsUpdatedPatchData struct {
	RefId         string          `json:"refId,omitempty" msgpack:"refId,omitempty"`
	Version       string          `json:"version,omitempty" msgpack:"version,omitempty"`
	Name          string          `json:"name,omitempty" msgpack:"name,omitempty"`
	VersionStatus string          `json:"versionStatus,omitempty" msgpack:"versionStatus,omitempty"`
	Status        view.FileStatus `json:"status,omitempty" msgpack:"status,omitempty"`
}

// BranchSavedPatch notifies that the branch was committed/saved.
// NOTE(review): the last two msgpack tags are `,omitempty` with no explicit
// name, so msgpack falls back to its default field naming — confirm this is
// intentional and matches consumers.
type BranchSavedPatch struct {
	Type            string  `json:"type" msgpack:"type"`
	UserId          string  `json:"userId" msgpack:"userId"`
	Comment         string  `json:"comment" msgpack:"comment"`
	Branch          string  `json:"branch,omitempty" msgpack:",omitempty"`
	MergeRequestURL *string `json:"mrUrl,omitempty" msgpack:",omitempty"`
}

// BranchPublishedPatch notifies that the branch was published as a version.
type BranchPublishedPatch struct {
	Type    string `json:"type" msgpack:"type"`
	UserId  string `json:"userId" msgpack:"userId"`
	Version string `json:"version" msgpack:"version"`
	Status  string `json:"status" msgpack:"status"`
}

// BranchEditorAddedPatch notifies that a user gained editor rights.
type BranchEditorAddedPatch struct {
	Type   string `json:"type" msgpack:"type"`
	UserId string `json:"userId" msgpack:"userId"`
}

// BranchEditorRemovedPatch notifies that a user lost editor rights.
type BranchEditorRemovedPatch struct {
	Type   string `json:"type" msgpack:"type"`
	UserId string `json:"userId" msgpack:"userId"`
}
diff --git a/qubership-apihub-service/websocket/WsClient.go b/qubership-apihub-service/websocket/WsClient.go new file mode 100644 index 0000000..ddeb603 --- /dev/null +++ b/qubership-apihub-service/websocket/WsClient.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

import (
	"crypto/rand"
	"encoding/json"
	"fmt"
	"math/big"
	"sync"
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
	ws "github.com/gorilla/websocket"
)

// WsClient is one websocket connection participating in an edit session.
// The mutex serializes writes to the underlying connection (gorilla/websocket
// allows at most one concurrent writer).
type WsClient struct {
	Connection  *ws.Conn
	SessionId   string
	User        view.User
	ConnectedAt time.Time
	UserColor   string
	mutex       sync.RWMutex
}

// NewWsClient wraps an accepted connection, stamping the connect time and
// assigning a random display color for the user.
func NewWsClient(connection *ws.Conn, sessionId string, user view.User) *WsClient {
	client := &WsClient{
		Connection:  connection,
		SessionId:   sessionId,
		ConnectedAt: time.Now(),
		User:        user,
		UserColor:   generateUserColor(),
		mutex:       sync.RWMutex{},
	}
	return client
}

// send writes payload as JSON under the write lock; on failure the connection
// is closed and the error is returned wrapped with the session id.
func (c *WsClient) send(payload interface{}) error {
	c.mutex.Lock()
	defer c.mutex.Unlock()
	err := c.Connection.WriteJSON(payload)
	if err != nil {
		c.Connection.Close()
		return fmt.Errorf("failed to send message to sess %s: %s", c.SessionId, err.Error())
	}
	return nil
}

// MarshalJSON serializes the public view of the client (SessionId is exposed
// as "wsId"); the raw connection and mutex are deliberately omitted.
func (c *WsClient) MarshalJSON() ([]byte, error) {
	return json.Marshal(&struct {
		SessionId   string    `json:"wsId"`
		User        view.User `json:"user"`
		ConnectedAt time.Time `json:"connectedAt"`
		UserColor   string    `json:"userColor"`
	}{
		SessionId:   c.SessionId,
		User:        c.User,
		ConnectedAt: c.ConnectedAt,
		UserColor:   c.UserColor,
	})
}

// generateUserColor returns a random CSS rgb() color. crypto/rand errors are
// ignored (they are practically impossible for the OS entropy source); a
// failed read would yield component 0.
func generateUserColor() string {
	r, _ := rand.Int(rand.Reader, big.NewInt(256))
	g, _ := rand.Int(rand.Reader, big.NewInt(256))
	b, _ := rand.Int(rand.Reader, big.NewInt(256))
	return fmt.Sprintf("rgb(%d, %d, %d)", r, g, b)
}
diff --git a/qubership-apihub-service/websocket/WsEditSession.go b/qubership-apihub-service/websocket/WsEditSession.go new file mode 100644 index 0000000..4123afa --- /dev/null +++ b/qubership-apihub-service/websocket/WsEditSession.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package websocket + +import ( + "encoding/json" + "sync" + "time" + + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/utils" + "github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view" + ws "github.com/gorilla/websocket" + log "github.com/sirupsen/logrus" +) + +// WsEditSession General operations for websocket file/branch edit session +type WsEditSession struct { + clients sync.Map + messageHandler WsMessageHandler + sessionClosedHandler SessionClosedHandler + EditSessionId string + registerClientsCh chan *RegMsg + OriginatorUserId string +} + +type RegMsg struct { + client *WsClient + wg *sync.WaitGroup +} + +func NewWsEditSession(editSessionId string, messageHandler WsMessageHandler, sessionClosedHandler SessionClosedHandler, originatorUserId string) *WsEditSession { + sess := &WsEditSession{ + clients: sync.Map{}, + messageHandler: messageHandler, + sessionClosedHandler: sessionClosedHandler, + EditSessionId: editSessionId, + OriginatorUserId: originatorUserId, + registerClientsCh: make(chan *RegMsg), + } + + utils.SafeAsync(func() { + sess.runClientRegistration() + }) + log.Debugf("Started WS edit session with id %s", editSessionId) + return sess +} + +func (b *WsEditSession) ConnectClient(wsId string, conn *ws.Conn, user view.User, extWg *sync.WaitGroup) { + conn.SetReadDeadline(time.Now().Add(PingTime * 2)) + conn.SetPongHandler(func(appData string) error { + conn.SetReadDeadline(time.Now().Add(PingTime * 2)) + return nil + }) + + var wg sync.WaitGroup + wg.Add(1) + + b.registerClientsCh <- 
&RegMsg{NewWsClient(conn, wsId, user), &wg} + + wg.Wait() + if extWg != nil { + extWg.Done() + } + + utils.SafeAsync(func() { + b.handleIncomingMessages(conn, wsId, user) + }) +} + +func (b *WsEditSession) GetClient(wsId string) *WsClient { + if client, exists := b.clients.Load(wsId); exists { + return client.(*WsClient) + } + return nil +} + +func (b *WsEditSession) runClientRegistration() { + for { + RegMsg, more := <-b.registerClientsCh + if RegMsg != nil { + client := RegMsg.client + b.clients.Store(client.SessionId, client) + + //send "user:connected" notification to all other connected users + b.NotifyOthers(client.SessionId, + UserConnectedPatch{ + Type: UserConnectedType, + SessionId: client.SessionId, + ConnectedAt: client.ConnectedAt, + User: client.User, + UserColor: client.UserColor, + }) + + //send "user:connected" notifications for each connected user to the current user + b.clients.Range(func(key, value interface{}) bool { + c := value.(*WsClient) + // use sync send method here + err := client.send(UserConnectedPatch{ + Type: UserConnectedType, + SessionId: c.SessionId, + ConnectedAt: c.ConnectedAt, + User: c.User, + UserColor: c.UserColor, + }) + if err != nil { + log.Errorf("Failed to send user:connected %v: %v", client.SessionId, err.Error()) + return false + } + return true + }) + RegMsg.wg.Done() + } + if !more { + return + } + } +} + +func (b *WsEditSession) NotifyClient(wsId string, message interface{}) { + utils.SafeAsync(func() { + v, exists := b.clients.Load(wsId) + if exists { + client := v.(*WsClient) + err := client.send(message) + if err != nil { + log.Errorf("Failed to notify client %v: %v", client.SessionId, err.Error()) + } + } else { + log.Debugf("Unable to send message '%s' since client %s not found", message, wsId) + } + }) +} + +func (b *WsEditSession) NotifyClientSync(wsId string, message interface{}) { + v, exists := b.clients.Load(wsId) + if exists { + client := v.(*WsClient) + err := client.send(message) + if err != nil { + 
log.Errorf("Failed to notify client sync %v: %v", client.SessionId, err.Error()) + } + } else { + log.Debugf("Unable to send message '%s' since client %s not found", message, wsId) + } +} + +func (b *WsEditSession) NotifyOthers(wsId string, message interface{}) { + utils.SafeAsync(func() { + b.clients.Range(func(key, value interface{}) bool { + c := value.(*WsClient) + if c.SessionId == wsId { + return true + } + err := c.send(message) + if err != nil { + log.Errorf("Failed to notify client %v: %v", c.SessionId, err.Error()) + } + return true + }) + }) +} + +func (b *WsEditSession) NotifyAll(message interface{}) { + utils.SafeAsync(func() { + b.clients.Range(func(key, value interface{}) bool { + c := value.(*WsClient) + err := c.send(message) + if err != nil { + log.Errorf("Failed to notify client %v: %v", c.SessionId, err.Error()) + } + return true + }) + }) +} + +func (b *WsEditSession) handleIncomingMessages(connection *ws.Conn, wsId string, user view.User) { + defer connection.Close() + for { + _, data, err := connection.ReadMessage() + if err != nil { + log.Debugf("Connection %v closed: %v", wsId, err.Error()) + b.handleClientDisconnect(wsId) + break + } + if b.messageHandler != nil { + b.messageHandler.HandleMessage(data, wsId, b) + } + } +} + +func (b *WsEditSession) handleClientDisconnect(wsId string) { + v, exists := b.clients.Load(wsId) + if !exists { + return + } + client := v.(*WsClient) + + b.clients.Delete(wsId) + + clientsCount := 0 + b.clients.Range(func(key, value interface{}) bool { + clientsCount++ + return true + }) + + if clientsCount > 0 { + b.NotifyAll(UserDisconnectedPatch{Type: UserDisconnectedType, SessionId: wsId, User: client.User}) + if b.sessionClosedHandler != nil { + b.sessionClosedHandler.HandleUserDisconnected(b.EditSessionId, wsId) + } + } else { + close(b.registerClientsCh) + if b.sessionClosedHandler != nil { + b.sessionClosedHandler.HandleSessionClosed(b.EditSessionId) + } + log.Debugf("Closed WS edit session with id %s", 
b.EditSessionId) + } +} + +func (b *WsEditSession) ForceDisconnectAll() { + utils.SafeAsync(func() { + b.clients.Range(func(key, value interface{}) bool { + c := value.(*WsClient) + c.Connection.Close() + return true + }) + }) +} + +func (b *WsEditSession) ForceDisconnect(wsId string) { + v, exists := b.clients.Load(wsId) + if exists { + client := v.(*WsClient) + client.Connection.Close() + } +} + +func (b *WsEditSession) MarshalJSON() ([]byte, error) { + var clients map[string]*WsClient + b.clients.Range(func(key, value interface{}) bool { + clients[key.(string)] = value.(*WsClient) + return true + }) + + return json.Marshal(&struct { + Clients map[string]*WsClient `json:"clients"` + EditSessionId string `json:"editSessionId"` + OriginatorUserId string `json:"originatorUserId"` + }{ + Clients: clients, + EditSessionId: b.EditSessionId, + OriginatorUserId: b.OriginatorUserId, + }) +} + +const PingTime = time.Second * 5 + +func (b *WsEditSession) SendPingToAllClients() { + b.clients.Range(func(key, value interface{}) bool { + client := value.(*WsClient) + utils.SafeAsync(func() { + if err := client.Connection.WriteControl(ws.PingMessage, []byte{}, time.Now().Add(PingTime)); err != nil { + log.Errorf("Can't send ping for %v", client.SessionId) + log.Debugf("Connection wsId=%v will be closed due to timeout: %v", client.SessionId, err.Error()) + b.handleClientDisconnect(client.SessionId) + client.Connection.Close() + } + }) + return true + }) +} diff --git a/qubership-apihub-service/websocket/WsFileActions.go b/qubership-apihub-service/websocket/WsFileActions.go new file mode 100644 index 0000000..79e346b --- /dev/null +++ b/qubership-apihub-service/websocket/WsFileActions.go @@ -0,0 +1,96 @@ +// Copyright 2024-2025 NetCracker Technology Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/ot"
)

// Message type discriminators for file edit sessions. "Output" types are sent
// server->client, "Input" types are expected client->server.
const (
	UserCursorOutputType  = "user:cursor"
	OperationOutputType   = "user:operation"
	DocSnapshotOutputType = "document:snapshot"
	UnexpectedMessageType = "message:unexpected"

	UserCursorInputType       = "cursor"
	OperationInputMessageType = "operation"

	DebugStateInputMessageType = "debug_state"
)

// TypedWsMessage is used to peek at the "type" field before full decoding.
type TypedWsMessage struct {
	Type string `json:"type" msgpack:"type"`
}

// UserCursorInput is a client's cursor/selection update.
type UserCursorInput struct {
	Type         string `json:"type" msgpack:"type"`
	Position     int    `json:"position" msgpack:"position"`
	SelectionEnd int    `json:"selectionEnd" msgpack:"selectionEnd"`
}

// UserCursorOutput broadcasts one user's cursor position to the session.
type UserCursorOutput struct {
	Type      string      `json:"type" msgpack:"type"`
	SessionId string      `json:"sessionId" msgpack:"sessionId"`
	Cursor    CursorValue `json:"cursor" msgpack:"cursor"`
}

type CursorValue struct {
	Position     int `json:"position" msgpack:"position"`
	SelectionEnd int `json:"selectionEnd" msgpack:"selectionEnd"`
}

// OperationInputMessage is a client-submitted OT operation against a revision.
type OperationInputMessage struct {
	Type      string        `json:"type" msgpack:"type"`
	Revision  int           `json:"revision" msgpack:"revision"`
	Operation []interface{} `json:"operation" msgpack:"operation"`
}

// OperationOutputMessage broadcasts an accepted OT operation to the session.
type OperationOutputMessage struct {
	Type      string        `json:"type" msgpack:"type"`
	SessionId string        `json:"sessionId" msgpack:"sessionId"`
	Revision  int           `json:"revision" msgpack:"revision"`
	Operation []interface{} `json:"operation" msgpack:"operation"`
}

// DocSnapshotOutputMessage sends the full document at a given revision.
type DocSnapshotOutputMessage struct {
	Type     string   `json:"type" msgpack:"type"`
	Revision int      `json:"revision" msgpack:"revision"`
	Document []string `json:"document" msgpack:"document"`
}

// UnexpectedMessage echoes back a message the server could not interpret.
type UnexpectedMessage struct {
	Type    string      `json:"type" msgpack:"type"`
	Message interface{} `json:"message" msgpack:"message"`
}

// WsEvent is an internal envelope for a raw frame received in a session.
type WsEvent struct {
	EditSessionId string // file or branch id
	WsId          string
	Data          []byte
}

// OpsStatData tracks when the document was last persisted and at which revision.
type OpsStatData struct {
	LastSavedRev  int
	SaveTimestamp time.Time
}

// DebugSessionStateOutputMessage dumps the full server-side session state
// (debug tooling only).
type DebugSessionStateOutputMessage struct {
	Session     *WsEditSession
	File        *ot.ServerDoc
	Cursors     map[string]CursorValue
	OpsStatData OpsStatData
}
diff --git a/qubership-apihub-service/websocket/WsGeneralActions.go b/qubership-apihub-service/websocket/WsGeneralActions.go new file mode 100644 index 0000000..70c7bd7 --- /dev/null +++ b/qubership-apihub-service/websocket/WsGeneralActions.go
// Copyright 2024-2025 NetCracker Technology Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package websocket

import (
	"time"

	"github.com/Netcracker/qubership-apihub-backend/qubership-apihub-service/view"
)

// Message type discriminators common to all edit-session kinds.
const (
	UserConnectedType    = "user:connected"
	UserDisconnectedType = "user:disconnected"
)

// UserConnectedPatch announces that a user joined the session; it is also
// replayed to a newly joined user once per already-connected client.
type UserConnectedPatch struct {
	Type        string    `json:"type" msgpack:"type"`
	SessionId   string    `json:"sessionId" msgpack:"sessionId"`
	ConnectedAt time.Time `json:"connectedAt" msgpack:"connectedAt"`
	User        view.User `json:"user" msgpack:"user"`
	UserColor   string    `json:"userColor" msgpack:"userColor"`
}

// UserDisconnectedPatch announces that a user left the session.
type UserDisconnectedPatch struct {
	Type      string    `json:"type" msgpack:"type"`
	SessionId string    `json:"sessionId" msgpack:"sessionId"`
	User      view.User `json:"user" msgpack:"user"`
}