diff --git a/.env.sample b/.env.sample
index 1c46d3451..d8703235d 100644
--- a/.env.sample
+++ b/.env.sample
@@ -36,12 +36,9 @@ AzureWebJobsStorage=
BACKEND_URL=http://localhost:7071
DOCUMENT_PROCESSING_QUEUE_NAME=
# Azure Blob Storage for storing the original documents to be processed
-AZURE_BLOB_ACCOUNT_NAME=
-AZURE_BLOB_ACCOUNT_KEY=
-AZURE_BLOB_CONTAINER_NAME=
+AZURE_BLOB_STORAGE_INFO="{\"containerName\":\"documents\",\"accountName\":\"\",\"accountKey\":\"\"}"
# Azure Form Recognizer for extracting the text from the documents
-AZURE_FORM_RECOGNIZER_ENDPOINT=
-AZURE_FORM_RECOGNIZER_KEY=
+AZURE_FORM_RECOGNIZER_INFO="{\"endpoint\":\"\",\"key\":\"\"}"
# Azure AI Content Safety for filtering out the inappropriate questions or answers
AZURE_CONTENT_SAFETY_ENDPOINT=
AZURE_CONTENT_SAFETY_KEY=
@@ -66,3 +63,5 @@ CONVERSATION_FLOW=
AZURE_COSMOSDB_INFO="{\"accountName\":\"cosmos-abc123\",\"databaseName\":\"db_conversation_history\",\"containerName\":\"conversations\"}"
AZURE_COSMOSDB_ACCOUNT_KEY=
AZURE_COSMOSDB_ENABLE_FEEDBACK=
+AZURE_POSTGRESQL_INFO="{\"user\":\"\",\"dbname\":\"postgres\",\"host\":\"\"}"
+DATABASE_TYPE="CosmosDB"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 87e4b8b3b..98fd7173f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -52,7 +52,6 @@ jobs:
env:
AZURE_ENV_NAME: ${{ github.run_id }}
AZURE_LOCATION: ${{ vars.AZURE_LOCATION }}
- AZURE_RESOURCE_GROUP: ${{ vars.AZURE_RESOURCE_GROUP }}
with:
imageName: ghcr.io/azure-samples/chat-with-your-data-solution-accelerator
cacheFrom: ghcr.io/azure-samples/chat-with-your-data-solution-accelerator
@@ -66,15 +65,12 @@ jobs:
AZURE_SUBSCRIPTION_ID
AZURE_ENV_NAME
AZURE_LOCATION
- AZURE_RESOURCE_GROUP
-
- name: Tidy up
uses: devcontainers/ci@v0.3
if: always()
env:
AZURE_ENV_NAME: ${{ github.run_id }}
AZURE_LOCATION: ${{ vars.AZURE_LOCATION }}
- AZURE_RESOURCE_GROUP: ${{ vars.AZURE_RESOURCE_GROUP }}
with:
push: never
imageName: ghcr.io/azure-samples/chat-with-your-data-solution-accelerator
@@ -87,8 +83,7 @@ jobs:
AZURE_SUBSCRIPTION_ID
AZURE_ENV_NAME
AZURE_LOCATION
- AZURE_RESOURCE_GROUP
-
+
- name: Send Notification on Failure
if: failure()
run: |
diff --git a/Makefile b/Makefile
index 6d816c65a..24c047929 100644
--- a/Makefile
+++ b/Makefile
@@ -57,14 +57,10 @@ azd-login: ## 🔑 Login to Azure with azd and a SPN
@echo -e "\e[34m$@\e[0m" || true
@azd auth login --client-id ${AZURE_CLIENT_ID} --client-secret ${AZURE_CLIENT_SECRET} --tenant-id ${AZURE_TENANT_ID}
-az-login: ## 🔑 Login to Azure with az and a SPN
- az login --service-principal -u ${AZURE_CLIENT_ID} -p ${AZURE_CLIENT_SECRET} --tenant ${AZURE_TENANT_ID}
-
-deploy: azd-login az-login ## 🚀 Deploy everything to Azure
+deploy: azd-login ## 🚀 Deploy everything to Azure
@echo -e "\e[34m$@\e[0m" || true
@azd env new ${AZURE_ENV_NAME}
@azd env set AZURE_APP_SERVICE_HOSTING_MODEL code --no-prompt
- @az group create --name ${AZURE_RESOURCE_GROUP} --location ${AZURE_LOCATION}
@azd provision --no-prompt
@azd deploy web --no-prompt
@azd deploy function --no-prompt
diff --git a/README.md b/README.md
index 9ec853d54..b01d5ca67 100644
--- a/README.md
+++ b/README.md
@@ -48,7 +48,7 @@ urlFragment: chat-with-your-data-solution-accelerator
## User story
Welcome to the *Chat with your data* Solution accelerator repository! The *Chat with your data* Solution accelerator is a powerful tool that combines the capabilities of Azure AI Search and Large Language Models (LLMs) to create a conversational search experience. This solution accelerator uses an Azure OpenAI GPT model and an Azure AI Search index generated from your data, which is integrated into a web application to provide a natural language interface, including [speech-to-text](docs/speech_to_text.md) functionality, for search queries. Users can drag and drop files, point to storage, and take care of technical setup to transform documents. Everything can be deployed in your own subscription to accelerate your use of this technology.
-![Solution Architecture - Chat with your data](/docs/images/cwyd-solution-architecture.png)
+
### About this repo
@@ -91,12 +91,15 @@ Here is a comparison table with a few features offered by Azure, an available Gi
- **Single application access to your full data set**: Minimize endpoints required to access internal company knowledgebases. Reuse the same backend with the [Microsoft Teams Extension](docs/teams_extension.md)
- **Natural language interaction with your unstructured data**: Use natural language to quickly find the answers you need and ask follow-up queries to get the supplemental details, including [Speech-to-text](docs/speech_to_text.md).
- **Easy access to source documentation when querying**: Review referenced documents in the same chat window for additional context.
+- **Chat history**: Prior conversations and context are maintained and accessible through chat history.
- **Data upload**: Batch upload documents of [various file types](docs/supported_file_types.md)
- **Accessible orchestration**: Prompt and document configuration (prompt engineering, document processing, and data retrieval)
+- **Database flexibility**: Dynamic database switching allows users to choose between PostgreSQL and Cosmos DB based on their requirements. If no preference is specified, the platform defaults to PostgreSQL.
**Note**: The current model allows users to ask questions about unstructured data, such as PDF, text, and docx files. See the [supported file types](docs/supported_file_types.md).
+
### Target end users
Company personnel (employees, executives) looking to research against internal unstructured company data would leverage this accelerator using natural language to find what they need quickly.
@@ -107,6 +110,11 @@ Tech administrators can use this accelerator to give their colleagues easy acces
### Use Case scenarios
+#### Employee Onboarding Scenario
+The sample data illustrates how this accelerator could be used for an employee onboarding scenario across industries.
+
+In this scenario, a newly hired employee is in the process of onboarding to their organization. Leveraging the solution accelerator, they navigate through the extensive offerings of their organization’s health and retirement benefits. With the newly integrated chat history capabilities, they can revisit previous conversations, ensuring continuity and context across multiple days of research. This functionality allows the new employee to efficiently gather and consolidate information, streamlining their onboarding experience. [For more details, refer to the README](docs/employee_assistance.md).
+
#### Financial Advisor Scenario
The sample data illustrates how this accelerator could be used in the financial services industry (FSI).
@@ -120,12 +128,6 @@ Additionally, we have implemented a Legal Review and Summarization Assistant sce
Note: Some of the sample data included with this accelerator was generated using AI and is for illustrative purposes only.
-#### Employee Onboarding Scenario
-The sample data illustrates how this accelerator could be used for an employee onboarding scenario in across industries.
-
-In this scenario, a newly hired employee is in the process of onboarding to their organization. Leveraging the solution accelerator, she navigates through the extensive offerings of her organization’s health and retirement benefits. With the newly integrated chat history capabilities, they can revisit previous conversations, ensuring continuity and context across multiple days of research. This functionality allows the new employee to efficiently gather and consolidate information, streamlining their onboarding experience. [For more details, refer to the README](docs/employee_assistance.md).
-
-
---
![One-click Deploy](/docs/images/oneClickDeploy.png)
@@ -146,6 +148,7 @@ In this scenario, a newly hired employee is in the process of onboarding to thei
- Azure Storage Account
- Azure Speech Service
- Azure CosmosDB
+- Azure PostgreSQL
- Teams (optional: Teams extension only)
### Required licenses
@@ -163,13 +166,30 @@ The following are links to the pricing details for some of the resources:
- [Azure AI Document Intelligence pricing](https://azure.microsoft.com/pricing/details/ai-document-intelligence/)
- [Azure Web App Pricing](https://azure.microsoft.com/pricing/details/app-service/windows/)
+### Deployment options: PostgreSQL or Cosmos DB
+With the addition of PostgreSQL, customers can leverage the power of a relationship-based AI solution to enhance historical conversation access, improve data privacy, and open the possibilities for scalability.
+
+Customers have the option to deploy this solution with PostgreSQL or Cosmos DB. Consider the following when deciding which database to use:
+- PostgreSQL enables a relationship-based AI solution and search indexing for Retrieval Augmented Generation (RAG)
+- Cosmos DB is a NoSQL-based solution for chat history
+
+
+To review PostgreSQL configuration overview and steps, follow the link [here](docs/postgreSQL.md).
+![Solution Architecture - Chat with your data PostgreSQL](/docs/images/architrecture_pg.png)
+
+To review Cosmos DB configuration overview and steps, follow the link [here](docs/employee_assistance.md).
+![Solution Architecture - Chat with your data CosmosDB](/docs/images/architecture_cdb.png)
+
### Deploy instructions
+The "Deploy to Azure" button offers a one-click deployment where you don’t have to clone the code. If you would like a developer experience instead, follow the [local deployment instructions](./docs/LOCAL_DEPLOYMENT.md).
-There are two choices; the "Deploy to Azure" offers a one click deployment where you don't have to clone the code, alternatively if you would like a developer experience, follow the [Local deployment instructions](./docs/LOCAL_DEPLOYMENT.md).
+Once you deploy to Azure, you will have the option to select PostgreSQL or Cosmos DB; see the screenshot below.
-The demo, which uses containers pre-built from the main branch is available by clicking this button:
+[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure-Samples%2Fchat-with-your-data-solution-accelerator%2Frefs%2Fheads%2Fmain%2Finfra%2Fmain.json)
+
+Select either "PostgreSQL" or "Cosmos DB":
+![Solution Architecture - DB Selection](/docs/images/db_selection.png)
-[![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure-Samples%2Fchat-with-your-data-solution-accelerator%2Fmain%2Finfra%2Fmain.json)
When Deployment is complete, follow steps in [Set Up Authentication in Azure App Service](./docs/azure_app_service_auth_setup.md) to add app authentication to your web app running on Azure App Service
@@ -195,9 +215,11 @@ switch to a lower version. To find out which versions are supported in different
![A screenshot of the chat app.](./docs/images/web-unstructureddata.png)
-\
-\
+
+
+
![Supporting documentation](/docs/images/supportingDocuments.png)
+
## Supporting documentation
### Resource links
diff --git a/code/backend/api/chat_history.py b/code/backend/api/chat_history.py
index 2aba1a8a4..8a86b8119 100644
--- a/code/backend/api/chat_history.py
+++ b/code/backend/api/chat_history.py
@@ -4,13 +4,12 @@
from dotenv import load_dotenv
from flask import request, jsonify, Blueprint
from openai import AsyncAzureOpenAI
-from backend.batch.utilities.chat_history.cosmosdb import CosmosConversationClient
from backend.batch.utilities.chat_history.auth_utils import (
get_authenticated_user_details,
)
from backend.batch.utilities.helpers.config.config_helper import ConfigHelper
-from azure.identity.aio import DefaultAzureCredential
from backend.batch.utilities.helpers.env_helper import EnvHelper
+from backend.batch.utilities.chat_history.database_factory import DatabaseFactory
load_dotenv()
bp_chat_history_response = Blueprint("chat_history", __name__)
@@ -20,35 +19,13 @@
env_helper: EnvHelper = EnvHelper()
-def init_cosmosdb_client():
- cosmos_conversation_client = None
- config = ConfigHelper.get_active_config_or_default()
- if config.enable_chat_history:
- try:
- cosmos_endpoint = (
- f"https://{env_helper.AZURE_COSMOSDB_ACCOUNT}.documents.azure.com:443/"
- )
-
- if not env_helper.AZURE_COSMOSDB_ACCOUNT_KEY:
- credential = DefaultAzureCredential()
- else:
- credential = env_helper.AZURE_COSMOSDB_ACCOUNT_KEY
-
- cosmos_conversation_client = CosmosConversationClient(
- cosmosdb_endpoint=cosmos_endpoint,
- credential=credential,
- database_name=env_helper.AZURE_COSMOSDB_DATABASE,
- container_name=env_helper.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER,
- enable_message_feedback=env_helper.AZURE_COSMOSDB_ENABLE_FEEDBACK,
- )
- except Exception as e:
- logger.exception("Exception in CosmosDB initialization: %s", e)
- cosmos_conversation_client = None
- raise e
- else:
- logger.debug("CosmosDB not configured")
-
- return cosmos_conversation_client
+def init_database_client():
+ try:
+ conversation_client = DatabaseFactory.get_conversation_client()
+ return conversation_client
+ except Exception as e:
+ logger.exception("Exception in database initialization: %s", e)
+ raise e
def init_openai_client():
@@ -75,7 +52,7 @@ def init_openai_client():
async def list_conversations():
config = ConfigHelper.get_active_config_or_default()
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
try:
offset = request.args.get("offset", 0)
@@ -83,32 +60,39 @@ async def list_conversations():
request_headers=request.headers
)
user_id = authenticated_user["user_principal_id"]
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return (jsonify({"error": "database not available"}), 500)
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
- # get the conversations from cosmos
- conversations = await cosmos_conversation_client.get_conversations(
- user_id, offset=offset, limit=25
- )
- if not isinstance(conversations, list):
- return (
- jsonify({"error": f"No conversations for {user_id} were found"}),
- 400,
+ await conversation_client.connect()
+ try:
+ conversations = await conversation_client.get_conversations(
+ user_id, offset=offset, limit=25
)
+ if not isinstance(conversations, list):
+ return (
+ jsonify({"error": f"No conversations for {user_id} were found"}),
+ 404,
+ )
- return (jsonify(conversations), 200)
+ return jsonify(conversations), 200
+ except Exception as e:
+ logger.exception(f"Error fetching conversations: {e}")
+ raise
+ finally:
+ await conversation_client.close()
except Exception as e:
- logger.exception("Exception in /list" + str(e))
- return (jsonify({"error": "Error While listing historical conversations"}), 500)
+ logger.exception(f"Exception in /history/list: {e}")
+ return jsonify({"error": "Error while listing historical conversations"}), 500
@bp_chat_history_response.route("/history/rename", methods=["POST"])
async def rename_conversation():
config = ConfigHelper.get_active_config_or_default()
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
+
try:
authenticated_user = get_authenticated_user_details(
request_headers=request.headers
@@ -122,45 +106,54 @@ async def rename_conversation():
if not conversation_id:
return (jsonify({"error": "conversation_id is required"}), 400)
- # make sure cosmos is configured
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return (jsonify({"error": "database not available"}), 500)
-
- # get the conversation from cosmos
- conversation = await cosmos_conversation_client.get_conversation(
- user_id, conversation_id
- )
- if not conversation:
- return (
- jsonify(
- {
- "error": f"Conversation {conversation_id} was not found. It either does not exist or the logged in user does not have access to it."
- }
- ),
- 400,
- )
-
- # update the title
title = request_json.get("title", None)
if not title or title.strip() == "":
- return jsonify({"error": "title is required"}), 400
- conversation["title"] = title
- updated_conversation = await cosmos_conversation_client.upsert_conversation(
- conversation
- )
- return (jsonify(updated_conversation), 200)
+ return jsonify({"error": "A non-empty title is required"}), 400
+
+ # Initialize and connect to the database client
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
+
+ await conversation_client.connect()
+ try:
+ # Retrieve conversation from database
+ conversation = await conversation_client.get_conversation(
+ user_id, conversation_id
+ )
+ if not conversation:
+ return (
+ jsonify(
+ {
+ "error": f"Conversation {conversation_id} was not found. It either does not exist or the logged in user does not have access to it."
+ }
+ ),
+ 400,
+ )
+ # Update the title and save changes
+ conversation["title"] = title
+ updated_conversation = await conversation_client.upsert_conversation(
+ conversation
+ )
+ return jsonify(updated_conversation), 200
+ except Exception as e:
+ logger.exception(
+ f"Error updating conversation: user_id={user_id}, conversation_id={conversation_id}, error={e}"
+ )
+ raise
+ finally:
+ await conversation_client.close()
except Exception as e:
- logger.exception("Exception in /rename" + str(e))
- return (jsonify({"error": "Error renaming is fail"}), 500)
+ logger.exception(f"Exception in /history/rename: {e}")
+ return jsonify({"error": "Error while renaming conversation"}), 500
@bp_chat_history_response.route("/history/read", methods=["POST"])
async def get_conversation():
config = ConfigHelper.get_active_config_or_default()
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
try:
authenticated_user = get_authenticated_user_details(
@@ -171,64 +164,71 @@ async def get_conversation():
# check request for conversation_id
request_json = request.get_json()
conversation_id = request_json.get("conversation_id", None)
-
if not conversation_id:
- return (jsonify({"error": "conversation_id is required"}), 400)
+ return jsonify({"error": "conversation_id is required"}), 400
- # make sure cosmos is configured
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return (jsonify({"error": "database not available"}), 500)
+ # Initialize and connect to the database client
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
- # get the conversation object and the related messages from cosmos
- conversation = await cosmos_conversation_client.get_conversation(
- user_id, conversation_id
- )
- # return the conversation id and the messages in the bot frontend format
- if not conversation:
- return (
- jsonify(
- {
- "error": f"Conversation {conversation_id} was not found. It either does not exist or the logged in user does not have access to it."
- }
- ),
- 400,
+ await conversation_client.connect()
+ try:
+ # Retrieve conversation
+ conversation = await conversation_client.get_conversation(
+ user_id, conversation_id
)
+ if not conversation:
+ return (
+ jsonify(
+ {
+ "error": f"Conversation {conversation_id} was not found. It either does not exist or the logged in user does not have access to it."
+ }
+ ),
+ 400,
+ )
- # get the messages for the conversation from cosmos
- conversation_messages = await cosmos_conversation_client.get_messages(
- user_id, conversation_id
- )
+ # Fetch conversation messages
+ conversation_messages = await conversation_client.get_messages(
+ user_id, conversation_id
+ )
+ messages = [
+ {
+ "id": msg["id"],
+ "role": msg["role"],
+ "content": msg["content"],
+ "createdAt": msg["createdAt"],
+ "feedback": msg.get("feedback"),
+ }
+ for msg in conversation_messages
+ ]
+
+ # Return formatted conversation and messages
+ return (
+ jsonify({"conversation_id": conversation_id, "messages": messages}),
+ 200,
+ )
+ except Exception as e:
+ logger.exception(
+ f"Error fetching conversation or messages: user_id={user_id}, conversation_id={conversation_id}, error={e}"
+ )
+ raise
+ finally:
+ await conversation_client.close()
- # format the messages in the bot frontend format
- messages = [
- {
- "id": msg["id"],
- "role": msg["role"],
- "content": msg["content"],
- "createdAt": msg["createdAt"],
- "feedback": msg.get("feedback"),
- }
- for msg in conversation_messages
- ]
-
- return (
- jsonify({"conversation_id": conversation_id, "messages": messages}),
- 200,
- )
except Exception as e:
- logger.exception("Exception in /read" + str(e))
- return (jsonify({"error": "Error while fetching history conversation"}), 500)
+ logger.exception(f"Exception in /history/read: {e}")
+ return jsonify({"error": "Error while fetching conversation history"}), 500
@bp_chat_history_response.route("/history/delete", methods=["DELETE"])
async def delete_conversation():
config = ConfigHelper.get_active_config_or_default()
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
try:
- # get the user id from the request headers
+ # Get the user ID from the request headers
authenticated_user = get_authenticated_user_details(
request_headers=request.headers
)
@@ -246,198 +246,239 @@ async def delete_conversation():
400,
)
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return (jsonify({"error": "database not available"}), 500)
+ # Initialize and connect to the database client
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
- # delete the conversation messages from cosmos first
- await cosmos_conversation_client.delete_messages(conversation_id, user_id)
+ await conversation_client.connect()
+ try:
+ # Delete conversation messages from database
+ await conversation_client.delete_messages(conversation_id, user_id)
- # Now delete the conversation
- await cosmos_conversation_client.delete_conversation(user_id, conversation_id)
+ # Delete the conversation itself
+ await conversation_client.delete_conversation(user_id, conversation_id)
+
+ return (
+ jsonify(
+ {
+ "message": "Successfully deleted conversation and messages",
+ "conversation_id": conversation_id,
+ }
+ ),
+ 200,
+ )
+ except Exception as e:
+ logger.exception(
+ f"Error deleting conversation: user_id={user_id}, conversation_id={conversation_id}, error={e}"
+ )
+ raise
+ finally:
+ await conversation_client.close()
- return (
- jsonify(
- {
- "message": "Successfully deleted conversation and messages",
- "conversation_id": conversation_id,
- }
- ),
- 200,
- )
except Exception as e:
- logger.exception("Exception in /delete" + str(e))
- return (jsonify({"error": "Error while deleting history conversation"}), 500)
+ logger.exception(f"Exception in /history/delete: {e}")
+ return jsonify({"error": "Error while deleting conversation history"}), 500
@bp_chat_history_response.route("/history/delete_all", methods=["DELETE"])
async def delete_all_conversations():
config = ConfigHelper.get_active_config_or_default()
+
+ # Check if chat history is available
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
try:
- # get the user id from the request headers
+ # Get the user ID from the request headers (ensure authentication is successful)
authenticated_user = get_authenticated_user_details(
request_headers=request.headers
)
user_id = authenticated_user["user_principal_id"]
+ # Initialize the database client
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
- # get conversations for user
- # make sure cosmos is configured
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return (jsonify({"error": "database not available"}), 500)
-
- conversations = await cosmos_conversation_client.get_conversations(
- user_id, offset=0, limit=None
- )
- if not conversations:
- return (
- jsonify({"error": f"No conversations for {user_id} were found"}),
- 400,
+ await conversation_client.connect()
+ try:
+ # Get all conversations for the user
+ conversations = await conversation_client.get_conversations(
+ user_id, offset=0, limit=None
)
+ if not conversations:
+ return (
+ jsonify({"error": f"No conversations found for user {user_id}"}),
+ 400,
+ )
- # delete each conversation
- for conversation in conversations:
- # delete the conversation messages from cosmos first
- await cosmos_conversation_client.delete_messages(
- conversation["id"], user_id
+ # Delete each conversation and its associated messages
+ for conversation in conversations:
+ try:
+ # Delete messages associated with the conversation
+ await conversation_client.delete_messages(
+ conversation["id"], user_id
+ )
+
+ # Delete the conversation itself
+ await conversation_client.delete_conversation(
+ user_id, conversation["id"]
+ )
+
+ except Exception as e:
+ # Log and continue with the next conversation if one fails
+ logger.exception(
+ f"Error deleting conversation {conversation['id']} for user {user_id}: {e}"
+ )
+ continue
+ return (
+ jsonify(
+ {
+ "message": f"Successfully deleted all conversations and messages for user {user_id}"
+ }
+ ),
+ 200,
)
-
- # Now delete the conversation
- await cosmos_conversation_client.delete_conversation(
- user_id, conversation["id"]
+ except Exception as e:
+ logger.exception(
+ f"Error deleting all conversations for user {user_id}: {e}"
)
-
- return (
- jsonify(
- {
- "message": f"Successfully deleted all conversation and messages for user {user_id} "
- }
- ),
- 200,
- )
+ raise
+ finally:
+ await conversation_client.close()
except Exception as e:
- logger.exception("Exception in /delete" + str(e))
- return (
- jsonify({"error": "Error while deleting all history conversation"}),
- 500,
- )
+ logger.exception(f"Exception in /history/delete_all: {e}")
+ return jsonify({"error": "Error while deleting all conversation history"}), 500
@bp_chat_history_response.route("/history/update", methods=["POST"])
async def update_conversation():
config = ConfigHelper.get_active_config_or_default()
if not config.enable_chat_history:
- return (jsonify({"error": "Chat history is not avaliable"}), 400)
+ return jsonify({"error": "Chat history is not available"}), 400
- authenticated_user = get_authenticated_user_details(request_headers=request.headers)
- user_id = authenticated_user["user_principal_id"]
try:
- # check request for conversation_id
+ # Get user details from request headers
+ authenticated_user = get_authenticated_user_details(
+ request_headers=request.headers
+ )
+ user_id = authenticated_user["user_principal_id"]
request_json = request.get_json()
conversation_id = request_json.get("conversation_id", None)
if not conversation_id:
- return (jsonify({"error": "conversation_id is required"}), 400)
-
- # make sure cosmos is configured
- cosmos_conversation_client = init_cosmosdb_client()
- if not cosmos_conversation_client:
- return jsonify({"error": "database not available"}), 500
+ return jsonify({"error": "conversation_id is required"}), 400
- # check for the conversation_id, if the conversation is not set, we will create a new one
- conversation = await cosmos_conversation_client.get_conversation(
- user_id, conversation_id
- )
- if not conversation:
- title = await generate_title(request_json["messages"])
- conversation = await cosmos_conversation_client.create_conversation(
- user_id=user_id, conversation_id=conversation_id, title=title
- )
- conversation_id = conversation["id"]
-
- # Format the incoming message object in the "chat/completions" messages format then write it to the
- # conversation history in cosmos
messages = request_json["messages"]
- if len(messages) > 0 and messages[0]["role"] == "user":
- user_message = next(
- (
- message
- for message in reversed(messages)
- if message["role"] == "user"
- ),
- None,
- )
- createdMessageValue = await cosmos_conversation_client.create_message(
- uuid=str(uuid4()),
- conversation_id=conversation_id,
- user_id=user_id,
- input_message=user_message,
+ if not messages or len(messages) == 0:
+ return jsonify({"error": "Messages are required"}), 400
+
+ # Initialize conversation client
+ conversation_client = init_database_client()
+ if not conversation_client:
+ return jsonify({"error": "Database not available"}), 500
+ await conversation_client.connect()
+ try:
+ # Get or create the conversation
+ conversation = await conversation_client.get_conversation(
+ user_id, conversation_id
)
- if createdMessageValue == "Conversation not found":
- return (jsonify({"error": "Conversation not found"}), 400)
- else:
- return (jsonify({"error": "User not found"}), 400)
+ if not conversation:
+ title = await generate_title(messages)
+ conversation = await conversation_client.create_conversation(
+ user_id=user_id, conversation_id=conversation_id, title=title
+ )
- if len(messages) > 0 and messages[-1]["role"] == "assistant":
- if len(messages) > 1 and messages[-2].get("role", None) == "tool":
- # write the tool message first
- await cosmos_conversation_client.create_message(
+ # Process and save user and assistant messages
+ # Process user message
+ if messages[0]["role"] == "user":
+ user_message = next(
+ (msg for msg in reversed(messages) if msg["role"] == "user"), None
+ )
+ if not user_message:
+ return jsonify({"error": "User message not found"}), 400
+
+ created_message = await conversation_client.create_message(
uuid=str(uuid4()),
conversation_id=conversation_id,
user_id=user_id,
- input_message=messages[-2],
+ input_message=user_message,
)
- # write the assistant message
- await cosmos_conversation_client.create_message(
- uuid=str(uuid4()),
- conversation_id=conversation_id,
- user_id=user_id,
- input_message=messages[-1],
- )
- else:
- return (jsonify({"error": "no conversationbot"}), 400)
+ if created_message == "Conversation not found":
+ return jsonify({"error": "Conversation not found"}), 400
+
+ # Process assistant and tool messages if available
+ if messages[-1]["role"] == "assistant":
+ if len(messages) > 1 and messages[-2].get("role") == "tool":
+ # Write the tool message first if it exists
+ await conversation_client.create_message(
+ uuid=str(uuid4()),
+ conversation_id=conversation_id,
+ user_id=user_id,
+ input_message=messages[-2],
+ )
+ # Write the assistant message
+ await conversation_client.create_message(
+ uuid=str(uuid4()),
+ conversation_id=conversation_id,
+ user_id=user_id,
+ input_message=messages[-1],
+ )
+ else:
+ return jsonify({"error": "No assistant message found"}), 400
- return (
- jsonify(
- {
- "success": True,
- "data": {
- "title": conversation["title"],
- "date": conversation["updatedAt"],
- "conversation_id": conversation["id"],
- },
- }
- ),
- 200,
- )
+ return (
+ jsonify(
+ {
+ "success": True,
+ "data": {
+ "title": conversation["title"],
+ "date": conversation["updatedAt"],
+ "conversation_id": conversation["id"],
+ },
+ }
+ ),
+ 200,
+ )
+ except Exception as e:
+ logger.exception(
+ f"Error updating conversation or messages: user_id={user_id}, conversation_id={conversation_id}, error={e}"
+ )
+ raise
+ finally:
+ await conversation_client.close()
except Exception as e:
- logger.exception("Exception in /update" + str(e))
- return (jsonify({"error": "Error while update the history conversation"}), 500)
+ logger.exception(f"Exception in /history/update: {e}")
+ return jsonify({"error": "Error while updating the conversation history"}), 500
@bp_chat_history_response.route("/history/frontend_settings", methods=["GET"])
def get_frontend_settings():
try:
+ # Clear the cache for the config helper method
ConfigHelper.get_active_config_or_default.cache_clear()
+
+ # Retrieve active config
config = ConfigHelper.get_active_config_or_default()
- chat_history_enabled = (
- config.enable_chat_history.lower() == "true"
- if isinstance(config.enable_chat_history, str)
- else config.enable_chat_history
- )
+
+ # Ensure `enable_chat_history` is processed correctly
+ if isinstance(config.enable_chat_history, str):
+ chat_history_enabled = config.enable_chat_history.strip().lower() == "true"
+ else:
+ chat_history_enabled = bool(config.enable_chat_history)
+
return jsonify({"CHAT_HISTORY_ENABLED": chat_history_enabled}), 200
+
except Exception as e:
- logger.exception("Exception in /frontend_settings" + str(e))
- return (jsonify({"error": "Error while getting frontend settings"}), 500)
+ logger.exception(f"Exception in /history/frontend_settings: {e}")
+ return jsonify({"error": "Error while getting frontend settings"}), 500
async def generate_title(conversation_messages):
title_prompt = "Summarize the conversation so far into a 4-word or less title. Do not use any quotation marks or punctuation. Do not include any other commentary or description."
+ # Filter only the user messages, but consider including system or assistant context if necessary
messages = [
{"role": msg["role"], "content": msg["content"]}
for msg in conversation_messages
@@ -447,6 +488,8 @@ async def generate_title(conversation_messages):
try:
azure_openai_client = init_openai_client()
+
+ # Create a chat completion with the Azure OpenAI client
response = await azure_openai_client.chat.completions.create(
model=env_helper.AZURE_OPENAI_MODEL,
messages=messages,
@@ -454,7 +497,14 @@ async def generate_title(conversation_messages):
max_tokens=64,
)
- title = response.choices[0].message.content
- return title
- except Exception:
- return messages[-2]["content"]
+ # Ensure response contains valid choices and content
+ if response and response.choices and len(response.choices) > 0:
+ title = response.choices[0].message.content.strip()
+ return title
+ else:
+ raise ValueError("No valid choices in response")
+
+ except Exception as e:
+ logger.exception(f"Error generating title: {str(e)}")
+ # Fallback: return the content of the second to last message if something goes wrong
+ return messages[-2]["content"] if len(messages) > 1 else "Untitled"
diff --git a/code/backend/batch/utilities/chat_history/cosmosdb.py b/code/backend/batch/utilities/chat_history/cosmosdb.py
index 7c3bb70c8..5cac5fc8c 100644
--- a/code/backend/batch/utilities/chat_history/cosmosdb.py
+++ b/code/backend/batch/utilities/chat_history/cosmosdb.py
@@ -2,8 +2,10 @@
from azure.cosmos.aio import CosmosClient
from azure.cosmos import exceptions
+from .database_client_base import DatabaseClientBase
-class CosmosConversationClient:
+
+class CosmosConversationClient(DatabaseClientBase):
def __init__(
self,
@@ -42,6 +44,12 @@ def __init__(
except exceptions.CosmosResourceNotFoundError:
raise ValueError("Invalid CosmosDB container name")
+ async def connect(self):
+ pass
+
+ async def close(self):
+ pass
+
async def ensure(self):
if (
not self.cosmosdb_client
diff --git a/code/backend/batch/utilities/chat_history/database_client_base.py b/code/backend/batch/utilities/chat_history/database_client_base.py
new file mode 100644
index 000000000..ebbf70fc2
--- /dev/null
+++ b/code/backend/batch/utilities/chat_history/database_client_base.py
@@ -0,0 +1,82 @@
+from abc import ABC, abstractmethod
+from typing import List, Optional, Dict, Any
+
+
+class DatabaseClientBase(ABC):
+ @abstractmethod
+ async def connect(self):
+ """Establish a connection to the database."""
+ pass
+
+ @abstractmethod
+ async def close(self):
+ """Close the connection to the database."""
+ pass
+
+ @abstractmethod
+ async def ensure(self):
+ """Verify that the database and required tables/collections exist."""
+ pass
+
+ @abstractmethod
+ async def create_conversation(
+ self, user_id: str, conversation_id: str, title: str = ""
+ ) -> bool:
+ """Create a new conversation entry."""
+ pass
+
+ @abstractmethod
+ async def upsert_conversation(self, conversation: Dict[str, Any]) -> bool:
+ """Update or insert a conversation entry."""
+ pass
+
+ @abstractmethod
+ async def delete_conversation(self, user_id: str, conversation_id: str) -> bool:
+ """Delete a specific conversation."""
+ pass
+
+ @abstractmethod
+ async def delete_messages(
+ self, conversation_id: str, user_id: str
+ ) -> List[Dict[str, Any]]:
+ """Delete all messages associated with a conversation."""
+ pass
+
+ @abstractmethod
+ async def get_conversations(
+ self, user_id: str, limit: int, sort_order: str = "DESC", offset: int = 0
+ ) -> List[Dict[str, Any]]:
+ """Retrieve a list of conversations for a user."""
+ pass
+
+ @abstractmethod
+ async def get_conversation(
+ self, user_id: str, conversation_id: str
+ ) -> Optional[Dict[str, Any]]:
+ """Retrieve a specific conversation by ID."""
+ pass
+
+ @abstractmethod
+ async def create_message(
+ self,
+ uuid: str,
+ conversation_id: str,
+ user_id: str,
+ input_message: Dict[str, Any],
+ ) -> bool:
+ """Create a new message within a conversation."""
+ pass
+
+ @abstractmethod
+ async def update_message_feedback(
+ self, user_id: str, message_id: str, feedback: str
+ ) -> bool:
+ """Update feedback for a specific message."""
+ pass
+
+ @abstractmethod
+ async def get_messages(
+ self, user_id: str, conversation_id: str
+ ) -> List[Dict[str, Any]]:
+ """Retrieve all messages within a conversation."""
+ pass
diff --git a/code/backend/batch/utilities/chat_history/database_factory.py b/code/backend/batch/utilities/chat_history/database_factory.py
new file mode 100644
index 000000000..980c2cf82
--- /dev/null
+++ b/code/backend/batch/utilities/chat_history/database_factory.py
@@ -0,0 +1,59 @@
+# database_factory.py
+from ..helpers.env_helper import EnvHelper
+from .cosmosdb import CosmosConversationClient
+from .postgresdbservice import PostgresConversationClient
+from azure.identity import DefaultAzureCredential
+from ..helpers.config.database_type import DatabaseType
+
+
+class DatabaseFactory:
+ @staticmethod
+ def get_conversation_client():
+ env_helper: EnvHelper = EnvHelper()
+
+ if env_helper.DATABASE_TYPE == DatabaseType.COSMOSDB.value:
+ DatabaseFactory._validate_env_vars(
+ [
+ "AZURE_COSMOSDB_ACCOUNT",
+ "AZURE_COSMOSDB_DATABASE",
+ "AZURE_COSMOSDB_CONVERSATIONS_CONTAINER",
+ ],
+ env_helper,
+ )
+
+ cosmos_endpoint = (
+ f"https://{env_helper.AZURE_COSMOSDB_ACCOUNT}.documents.azure.com:443/"
+ )
+ credential = (
+ DefaultAzureCredential()
+ if not env_helper.AZURE_COSMOSDB_ACCOUNT_KEY
+ else env_helper.AZURE_COSMOSDB_ACCOUNT_KEY
+ )
+ return CosmosConversationClient(
+ cosmosdb_endpoint=cosmos_endpoint,
+ credential=credential,
+ database_name=env_helper.AZURE_COSMOSDB_DATABASE,
+ container_name=env_helper.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER,
+ enable_message_feedback=env_helper.AZURE_COSMOSDB_ENABLE_FEEDBACK,
+ )
+ elif env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
+ DatabaseFactory._validate_env_vars(
+ ["POSTGRESQL_USER", "POSTGRESQL_HOST", "POSTGRESQL_DATABASE"],
+ env_helper,
+ )
+
+ return PostgresConversationClient(
+ user=env_helper.POSTGRESQL_USER,
+ host=env_helper.POSTGRESQL_HOST,
+ database=env_helper.POSTGRESQL_DATABASE,
+ )
+ else:
+ raise ValueError(
+ "Unsupported DATABASE_TYPE. Please set DATABASE_TYPE to 'CosmosDB' or 'PostgreSQL'."
+ )
+
+ @staticmethod
+ def _validate_env_vars(required_vars, env_helper):
+ for var in required_vars:
+ if not getattr(env_helper, var, None):
+ raise ValueError(f"Environment variable {var} is required.")
diff --git a/code/backend/batch/utilities/chat_history/postgresdbservice.py b/code/backend/batch/utilities/chat_history/postgresdbservice.py
new file mode 100644
index 000000000..a758bb20c
--- /dev/null
+++ b/code/backend/batch/utilities/chat_history/postgresdbservice.py
@@ -0,0 +1,159 @@
+import logging
+import asyncpg
+from datetime import datetime, timezone
+from azure.identity import DefaultAzureCredential
+
+from .database_client_base import DatabaseClientBase
+
+logger = logging.getLogger(__name__)
+
+
+class PostgresConversationClient(DatabaseClientBase):
+
+ def __init__(
+ self, user: str, host: str, database: str, enable_message_feedback: bool = False
+ ):
+ self.user = user
+ self.host = host
+ self.database = database
+ self.enable_message_feedback = enable_message_feedback
+ self.conn = None
+
+ async def connect(self):
+ try:
+ credential = DefaultAzureCredential()
+ token = credential.get_token(
+ "https://ossrdbms-aad.database.windows.net/.default"
+ ).token
+ self.conn = await asyncpg.connect(
+ user=self.user,
+ host=self.host,
+ database=self.database,
+ password=token,
+ port=5432,
+ ssl="require",
+ )
+ except Exception as e:
+ logger.error("Failed to connect to PostgreSQL: %s", e)
+ raise
+
+ async def close(self):
+ if self.conn:
+ await self.conn.close()
+
+ async def ensure(self):
+ if not self.conn:
+ return False, "PostgreSQL client not initialized correctly"
+ return True, "PostgreSQL client initialized successfully"
+
+ async def create_conversation(self, conversation_id, user_id, title=""):
+ utc_now = datetime.now(timezone.utc)
+ createdAt = utc_now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+ query = """
+ INSERT INTO conversations (id, conversation_id, type, "createdAt", "updatedAt", user_id, title)
+ VALUES ($1, $2, 'conversation', $3, $3, $4, $5)
+ RETURNING *
+ """
+ conversation = await self.conn.fetchrow(
+ query, conversation_id, conversation_id, createdAt, user_id, title
+ )
+ return dict(conversation) if conversation else False
+
+ async def upsert_conversation(self, conversation):
+ query = """
+ INSERT INTO conversations (id, conversation_id, type, "createdAt", "updatedAt", user_id, title)
+ VALUES ($1, $2, $3, $4, $5, $6, $7)
+ ON CONFLICT (id) DO UPDATE SET
+ "updatedAt" = EXCLUDED."updatedAt",
+ title = EXCLUDED.title
+ RETURNING *
+ """
+ updated_conversation = await self.conn.fetchrow(
+ query,
+ conversation["id"],
+ conversation["conversation_id"],
+ conversation["type"],
+ conversation["createdAt"],
+ conversation["updatedAt"],
+ conversation["user_id"],
+ conversation["title"],
+ )
+ return dict(updated_conversation) if updated_conversation else False
+
+ async def delete_conversation(self, user_id, conversation_id):
+ query = "DELETE FROM conversations WHERE conversation_id = $1 AND user_id = $2"
+ await self.conn.execute(query, conversation_id, user_id)
+ return True
+
+ async def delete_messages(self, conversation_id, user_id):
+ query = "DELETE FROM messages WHERE conversation_id = $1 AND user_id = $2 RETURNING *"
+ messages = await self.conn.fetch(query, conversation_id, user_id)
+ return [dict(message) for message in messages]
+
+ async def get_conversations(self, user_id, limit=None, sort_order="DESC", offset=0):
+ try:
+ offset = int(offset) # Ensure offset is an integer
+ except ValueError:
+ raise ValueError("Offset must be an integer.")
+ # Base query without LIMIT and OFFSET
+ query = f"""
+ SELECT * FROM conversations
+ WHERE user_id = $1 AND type = 'conversation'
+ ORDER BY "updatedAt" {sort_order}
+ """
+ # Append LIMIT and OFFSET to the query if limit is specified
+ if limit is not None:
+ try:
+ limit = int(limit) # Ensure limit is an integer
+ query += " LIMIT $2 OFFSET $3"
+ # Fetch records with LIMIT and OFFSET
+ conversations = await self.conn.fetch(query, user_id, limit, offset)
+ except ValueError:
+ raise ValueError("Limit must be an integer.")
+ else:
+ # Fetch records without LIMIT and OFFSET
+ conversations = await self.conn.fetch(query, user_id)
+ return [dict(conversation) for conversation in conversations]
+
+ async def get_conversation(self, user_id, conversation_id):
+ query = "SELECT * FROM conversations WHERE id = $1 AND user_id = $2 AND type = 'conversation'"
+ conversation = await self.conn.fetchrow(query, conversation_id, user_id)
+ return dict(conversation) if conversation else None
+
+ async def create_message(self, uuid, conversation_id, user_id, input_message: dict):
+ message_id = uuid
+ utc_now = datetime.now(timezone.utc)
+ createdAt = utc_now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+ query = """
+ INSERT INTO messages (id, type, "createdAt", "updatedAt", user_id, conversation_id, role, content, feedback)
+ VALUES ($1, 'message', $2, $2, $3, $4, $5, $6, $7)
+ RETURNING *
+ """
+ feedback = "" if self.enable_message_feedback else None
+ message = await self.conn.fetchrow(
+ query,
+ message_id,
+ createdAt,
+ user_id,
+ conversation_id,
+ input_message["role"],
+ input_message["content"],
+ feedback,
+ )
+
+ if message:
+ update_query = 'UPDATE conversations SET "updatedAt" = $1 WHERE id = $2 AND user_id = $3 RETURNING *'
+ await self.conn.execute(update_query, createdAt, conversation_id, user_id)
+ return dict(message)
+ else:
+ return False
+
+ async def update_message_feedback(self, user_id, message_id, feedback):
+ query = "UPDATE messages SET feedback = $1 WHERE id = $2 AND user_id = $3 RETURNING *"
+ message = await self.conn.fetchrow(query, feedback, message_id, user_id)
+ return dict(message) if message else False
+
+ async def get_messages(self, user_id, conversation_id):
+ query = 'SELECT * FROM messages WHERE conversation_id = $1 AND user_id = $2 ORDER BY "createdAt" ASC'
+ messages = await self.conn.fetch(query, conversation_id, user_id)
+ return [dict(message) for message in messages]
diff --git a/code/backend/batch/utilities/helpers/azure_postgres_helper.py b/code/backend/batch/utilities/helpers/azure_postgres_helper.py
new file mode 100644
index 000000000..674ba166a
--- /dev/null
+++ b/code/backend/batch/utilities/helpers/azure_postgres_helper.py
@@ -0,0 +1,275 @@
+import logging
+import psycopg2
+from psycopg2.extras import execute_values, RealDictCursor
+from azure.identity import DefaultAzureCredential
+from .llm_helper import LLMHelper
+from .env_helper import EnvHelper
+
+logger = logging.getLogger(__name__)
+
+
+class AzurePostgresHelper:
+ def __init__(self):
+ self.llm_helper = LLMHelper()
+ self.env_helper = EnvHelper()
+ self.conn = None
+
+ def _create_search_client(self):
+ """
+ Establishes a connection to Azure PostgreSQL using AAD authentication.
+ """
+ try:
+ user = self.env_helper.POSTGRESQL_USER
+ host = self.env_helper.POSTGRESQL_HOST
+ dbname = self.env_helper.POSTGRESQL_DATABASE
+
+ # Acquire the access token
+ credential = DefaultAzureCredential()
+ access_token = credential.get_token(
+ "https://ossrdbms-aad.database.windows.net/.default"
+ )
+
+ # Use the token in the connection string
+ conn_string = (
+ f"host={host} user={user} dbname={dbname} password={access_token.token}"
+ )
+ self.conn = psycopg2.connect(conn_string)
+ logger.info("Connected to Azure PostgreSQL successfully.")
+ return self.conn
+ except Exception as e:
+ logger.error(f"Error establishing a connection to PostgreSQL: {e}")
+ raise
+
+ def get_search_client(self):
+ """
+ Provides a reusable database connection.
+ """
+ if self.conn is None or self.conn.closed != 0: # Ensure the connection is open
+ self.conn = self._create_search_client()
+ return self.conn
+
+ def get_vector_store(self, embedding_array):
+ """
+        Fetches the rows from the vector_store table most similar to the given embedding vector, ordered by vector distance and limited to AZURE_POSTGRES_SEARCH_TOP_K results.
+ """
+ conn = self.get_search_client()
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ cur.execute(
+ """
+ SELECT id, title, chunk, "offset", page_number, content, source
+ FROM vector_store
+ ORDER BY content_vector <=> %s::vector
+ LIMIT %s
+ """,
+ (
+ embedding_array,
+ self.env_helper.AZURE_POSTGRES_SEARCH_TOP_K,
+ ),
+ )
+ search_results = cur.fetchall()
+ logger.info(f"Retrieved {len(search_results)} search results.")
+ return search_results
+ except Exception as e:
+ logger.error(f"Error executing search query: {e}")
+ raise
+ finally:
+ conn.close()
+
+ def create_vector_store(self, documents_to_upload):
+ """
+ Inserts documents into the `vector_store` table in batch mode.
+ """
+ conn = self.get_search_client()
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ data_to_insert = [
+ (
+ d["id"],
+ d["title"],
+ d["chunk"],
+ d["chunk_id"],
+ d["offset"],
+ d["page_number"],
+ d["content"],
+ d["source"],
+ d["metadata"],
+ d["content_vector"],
+ )
+ for d in documents_to_upload
+ ]
+
+ # Batch insert using execute_values for efficiency
+ query = """
+ INSERT INTO vector_store (
+ id, title, chunk, chunk_id, "offset", page_number,
+ content, source, metadata, content_vector
+ ) VALUES %s
+ """
+ execute_values(cur, query, data_to_insert)
+ logger.info(
+ f"Inserted {len(documents_to_upload)} documents successfully."
+ )
+
+ conn.commit() # Commit the transaction
+ except Exception as e:
+ logger.error(f"Error during index creation: {e}")
+ conn.rollback() # Roll back transaction on error
+ raise
+ finally:
+ conn.close()
+
+ def get_files(self):
+ """
+ Fetches distinct titles from the PostgreSQL database.
+
+ Returns:
+ list[dict] or None: A list of dictionaries (each with a single key 'title')
+ or None if no titles are found or an error occurs.
+ """
+ conn = self.get_search_client()
+ try:
+ # Using a cursor to execute the query
+ with conn.cursor(cursor_factory=RealDictCursor) as cursor:
+ query = """
+ SELECT id, title
+ FROM vector_store
+ WHERE title IS NOT NULL
+ ORDER BY title;
+ """
+ cursor.execute(query)
+ # Fetch all results
+ results = cursor.fetchall()
+ # Return results or None if empty
+ return results if results else None
+ except psycopg2.Error as db_err:
+ logger.error(f"Database error while fetching titles: {db_err}")
+ raise
+ except Exception as e:
+ logger.error(f"Unexpected error while fetching titles: {e}")
+ raise
+ finally:
+ conn.close()
+
+ def delete_documents(self, ids_to_delete):
+ """
+ Deletes documents from the PostgreSQL database based on the provided ids.
+
+ Args:
+            ids_to_delete (list): A list of dicts, each containing an 'id' key identifying a document to delete.
+
+ Returns:
+ int: The number of deleted rows.
+ """
+ conn = self.get_search_client()
+ try:
+ if not ids_to_delete:
+ logger.warning("No IDs provided for deletion.")
+ return 0
+
+ # Using a cursor to execute the query
+ with conn.cursor() as cursor:
+ # Construct the DELETE query with the list of ids_to_delete
+ query = """
+ DELETE FROM vector_store
+ WHERE id = ANY(%s)
+ """
+ # Extract the 'id' values from the list of dictionaries (ids_to_delete)
+ ids_to_delete_values = [item["id"] for item in ids_to_delete]
+
+ # Execute the query, passing the list of IDs as a parameter
+ cursor.execute(query, (ids_to_delete_values,))
+
+ # Commit the transaction
+ conn.commit()
+
+ # Return the number of deleted rows
+ deleted_rows = cursor.rowcount
+ logger.info(f"Deleted {deleted_rows} documents.")
+ return deleted_rows
+ except psycopg2.Error as db_err:
+ logger.error(f"Database error while deleting documents: {db_err}")
+ conn.rollback()
+ raise
+ except Exception as e:
+ logger.error(f"Unexpected error while deleting documents: {e}")
+ conn.rollback()
+ raise
+ finally:
+ conn.close()
+
+ def perform_search(self, title):
+ """
+ Fetches search results from PostgreSQL based on the title.
+ """
+ # Establish connection to PostgreSQL
+ conn = self.get_search_client()
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ # Execute query to fetch title, content, and metadata
+ cur.execute(
+ """
+ SELECT title, content, metadata
+ FROM vector_store
+ WHERE title = %s
+ """,
+ (title,),
+ )
+ results = cur.fetchall() # Fetch all matching results
+ logger.info(f"Retrieved {len(results)} search result(s).")
+ return results
+ except Exception as e:
+ logger.error(f"Error executing search query: {e}")
+ raise
+ finally:
+ conn.close()
+
+ def get_unique_files(self):
+ """
+ Fetches unique titles from PostgreSQL.
+ """
+ # Establish connection to PostgreSQL
+ conn = self.get_search_client()
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ # Execute query to fetch distinct titles
+ cur.execute(
+ """
+ SELECT DISTINCT title
+ FROM vector_store
+ """
+ )
+ results = cur.fetchall() # Fetch all results as RealDictRow objects
+ logger.info(f"Retrieved {len(results)} unique title(s).")
+ return results
+ except Exception as e:
+ logger.error(f"Error executing search query: {e}")
+ raise
+ finally:
+ conn.close()
+
+ def search_by_blob_url(self, blob_url):
+ """
+        Fetches ids and titles from PostgreSQL for rows whose source matches the given blob URL (suffixed with the SAS token placeholder).
+ """
+ # Establish connection to PostgreSQL
+ conn = self.get_search_client()
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cur:
+ # Execute parameterized query to fetch results
+ cur.execute(
+ """
+ SELECT id, title
+ FROM vector_store
+ WHERE source = %s
+ """,
+ (f"{blob_url}_SAS_TOKEN_PLACEHOLDER_",),
+ )
+ results = cur.fetchall() # Fetch all results as RealDictRow objects
+ logger.info(f"Retrieved {len(results)} unique title(s).")
+ return results
+ except Exception as e:
+ logger.error(f"Error executing search query: {e}")
+ raise
+ finally:
+ conn.close()
diff --git a/code/backend/batch/utilities/helpers/config/config_helper.py b/code/backend/batch/utilities/helpers/config/config_helper.py
index 05549ac04..dca7c52ab 100644
--- a/code/backend/batch/utilities/helpers/config/config_helper.py
+++ b/code/backend/batch/utilities/helpers/config/config_helper.py
@@ -13,6 +13,7 @@
from ..env_helper import EnvHelper
from .assistant_strategy import AssistantStrategy
from .conversation_flow import ConversationFlow
+from .database_type import DatabaseType
CONFIG_CONTAINER_NAME = "config"
CONFIG_FILE_NAME = "active.json"
@@ -49,9 +50,8 @@ def __init__(self, config: dict):
if self.env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION
else None
)
- self.enable_chat_history = config.get(
- "enable_chat_history", self.env_helper.CHAT_HISTORY_ENABLED
- )
+ self.enable_chat_history = config["enable_chat_history"]
+ self.database_type = config.get("database_type", self.env_helper.DATABASE_TYPE)
def get_available_document_types(self) -> list[str]:
document_types = {
@@ -118,8 +118,10 @@ def __init__(self, messages: dict):
class Logging:
def __init__(self, logging: dict):
- self.log_user_interactions = logging["log_user_interactions"]
- self.log_tokens = logging["log_tokens"]
+ self.log_user_interactions = (
+ str(logging["log_user_interactions"]).lower() == "true"
+ )
+ self.log_tokens = str(logging["log_tokens"]).lower() == "true"
class IntegratedVectorizationConfig:
@@ -245,8 +247,22 @@ def get_default_config():
logger.info("Loading default config from %s", config_file_path)
ConfigHelper._default_config = json.loads(
Template(f.read()).substitute(
- ORCHESTRATION_STRATEGY=env_helper.ORCHESTRATION_STRATEGY,
- CHAT_HISTORY_ENABLED=env_helper.CHAT_HISTORY_ENABLED,
+ ORCHESTRATION_STRATEGY=(
+ OrchestrationStrategy.SEMANTIC_KERNEL.value
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
+ else env_helper.ORCHESTRATION_STRATEGY
+ ),
+ LOG_USER_INTERACTIONS=(
+ False
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
+ else True
+ ),
+ LOG_TOKENS=(
+ False
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
+ else True
+ ),
+ DATABASE_TYPE=env_helper.DATABASE_TYPE,
)
)
if env_helper.USE_ADVANCED_IMAGE_PROCESSING:
diff --git a/code/backend/batch/utilities/helpers/config/database_type.py b/code/backend/batch/utilities/helpers/config/database_type.py
new file mode 100644
index 000000000..1b914d037
--- /dev/null
+++ b/code/backend/batch/utilities/helpers/config/database_type.py
@@ -0,0 +1,6 @@
+from enum import Enum
+
+
+class DatabaseType(Enum):
+ COSMOSDB = "CosmosDB"
+ POSTGRESQL = "PostgreSQL"
diff --git a/code/backend/batch/utilities/helpers/config/default.json b/code/backend/batch/utilities/helpers/config/default.json
index be50c1a4c..45db5ee3c 100644
--- a/code/backend/batch/utilities/helpers/config/default.json
+++ b/code/backend/batch/utilities/helpers/config/default.json
@@ -136,11 +136,12 @@
"page_overlap_length": "100"
},
"logging": {
- "log_user_interactions": true,
- "log_tokens": true
+ "log_user_interactions": "${LOG_USER_INTERACTIONS}",
+ "log_tokens": "${LOG_TOKENS}"
},
"orchestrator": {
"strategy": "${ORCHESTRATION_STRATEGY}"
},
- "enable_chat_history": "${CHAT_HISTORY_ENABLED}"
+ "enable_chat_history": true,
+ "database_type": "${DATABASE_TYPE}"
}
diff --git a/code/backend/batch/utilities/helpers/embedders/embedder_factory.py b/code/backend/batch/utilities/helpers/embedders/embedder_factory.py
index 3a2336b99..d83ead1fe 100644
--- a/code/backend/batch/utilities/helpers/embedders/embedder_factory.py
+++ b/code/backend/batch/utilities/helpers/embedders/embedder_factory.py
@@ -1,6 +1,8 @@
from ..env_helper import EnvHelper
+from ..config.database_type import DatabaseType
from ..azure_blob_storage_client import AzureBlobStorageClient
from .push_embedder import PushEmbedder
+from .postgres_embedder import PostgresEmbedder
from .integrated_vectorization_embedder import (
IntegratedVectorizationEmbedder,
)
@@ -9,7 +11,10 @@
class EmbedderFactory:
@staticmethod
def create(env_helper: EnvHelper):
- if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
- return IntegratedVectorizationEmbedder(env_helper)
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
+ return PostgresEmbedder(AzureBlobStorageClient(), env_helper)
else:
- return PushEmbedder(AzureBlobStorageClient(), env_helper)
+ if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
+ return IntegratedVectorizationEmbedder(env_helper)
+ else:
+ return PushEmbedder(AzureBlobStorageClient(), env_helper)
diff --git a/code/backend/batch/utilities/helpers/embedders/postgres_embedder.py b/code/backend/batch/utilities/helpers/embedders/postgres_embedder.py
new file mode 100644
index 000000000..d81c9727c
--- /dev/null
+++ b/code/backend/batch/utilities/helpers/embedders/postgres_embedder.py
@@ -0,0 +1,98 @@
+import json
+import logging
+from typing import List
+
+from ...helpers.llm_helper import LLMHelper
+from ...helpers.env_helper import EnvHelper
+from ..azure_blob_storage_client import AzureBlobStorageClient
+
+from ..config.embedding_config import EmbeddingConfig
+from ..config.config_helper import ConfigHelper
+
+from .embedder_base import EmbedderBase
+from ..azure_postgres_helper import AzurePostgresHelper
+from ..document_loading_helper import DocumentLoading
+from ..document_chunking_helper import DocumentChunking
+from ...common.source_document import SourceDocument
+
+logger = logging.getLogger(__name__)
+
+
+class PostgresEmbedder(EmbedderBase):
+ def __init__(self, blob_client: AzureBlobStorageClient, env_helper: EnvHelper):
+ self.env_helper = env_helper
+ self.llm_helper = LLMHelper()
+ self.azure_postgres_helper = AzurePostgresHelper()
+ self.document_loading = DocumentLoading()
+ self.document_chunking = DocumentChunking()
+ self.blob_client = blob_client
+ self.config = ConfigHelper.get_active_config_or_default()
+ self.embedding_configs = {}
+ for processor in self.config.document_processors:
+ ext = processor.document_type.lower()
+ self.embedding_configs[ext] = processor
+
+ def embed_file(self, source_url: str, file_name: str):
+ file_extension = file_name.split(".")[-1].lower()
+ embedding_config = self.embedding_configs.get(file_extension)
+ self.__embed(
+ source_url=source_url,
+ file_extension=file_extension,
+ embedding_config=embedding_config,
+ )
+ if file_extension != "url":
+ self.blob_client.upsert_blob_metadata(
+ file_name, {"embeddings_added": "true"}
+ )
+
+ def __embed(
+ self, source_url: str, file_extension: str, embedding_config: EmbeddingConfig
+ ):
+ documents_to_upload: List[SourceDocument] = []
+ if (
+ embedding_config.use_advanced_image_processing
+ and file_extension
+ in self.config.get_advanced_image_processing_image_types()
+ ):
+ raise NotImplementedError(
+ "Advanced image processing is not supported in PostgresEmbedder."
+ )
+ else:
+ documents: List[SourceDocument] = self.document_loading.load(
+ source_url, embedding_config.loading
+ )
+ documents = self.document_chunking.chunk(
+ documents, embedding_config.chunking
+ )
+
+ for document in documents:
+ documents_to_upload.append(self.__convert_to_search_document(document))
+
+ if documents_to_upload:
+ self.azure_postgres_helper.create_vector_store(documents_to_upload)
+ else:
+ logger.warning("No documents to upload.")
+
+ def __convert_to_search_document(self, document: SourceDocument):
+ embedded_content = self.llm_helper.generate_embeddings(document.content)
+ metadata = {
+ "id": document.id,
+ "source": document.source,
+ "title": document.title,
+ "chunk": document.chunk,
+ "chunk_id": document.chunk_id,
+ "offset": document.offset,
+ "page_number": document.page_number,
+ }
+ return {
+ "id": document.id,
+ "content": document.content,
+ "content_vector": embedded_content,
+ "metadata": json.dumps(metadata),
+ "title": document.title,
+ "source": document.source,
+ "chunk": document.chunk,
+ "chunk_id": document.chunk_id,
+ "offset": document.offset,
+ "page_number": document.page_number,
+ }
diff --git a/code/backend/batch/utilities/helpers/env_helper.py b/code/backend/batch/utilities/helpers/env_helper.py
index 63c5d52d9..1e767bc51 100644
--- a/code/backend/batch/utilities/helpers/env_helper.py
+++ b/code/backend/batch/utilities/helpers/env_helper.py
@@ -5,6 +5,7 @@
from dotenv import load_dotenv
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from azure.keyvault.secrets import SecretClient
+from ..helpers.config.database_type import DatabaseType
logger = logging.getLogger(__name__)
@@ -87,9 +88,48 @@ def __load_config(self, **kwargs) -> None:
"AZURE_SEARCH_DATASOURCE_NAME", ""
)
self.AZURE_SEARCH_INDEXER_NAME = os.getenv("AZURE_SEARCH_INDEXER_NAME", "")
- self.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION = self.get_env_var_bool(
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION", "False"
- )
+
+ # Chat History DB Integration Settings
+ # Set default values based on DATABASE_TYPE
+ self.DATABASE_TYPE = (
+ os.getenv("DATABASE_TYPE", "").strip() or DatabaseType.COSMOSDB.value
+ )
+ # Cosmos DB configuration
+ if self.DATABASE_TYPE == DatabaseType.COSMOSDB.value:
+ azure_cosmosdb_info = self.get_info_from_env("AZURE_COSMOSDB_INFO", "")
+ self.AZURE_COSMOSDB_DATABASE = azure_cosmosdb_info.get("databaseName", "")
+ self.AZURE_COSMOSDB_ACCOUNT = azure_cosmosdb_info.get("accountName", "")
+ self.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER = azure_cosmosdb_info.get(
+ "containerName", ""
+ )
+ self.AZURE_COSMOSDB_ACCOUNT_KEY = self.secretHelper.get_secret(
+ "AZURE_COSMOSDB_ACCOUNT_KEY"
+ )
+ self.AZURE_COSMOSDB_ENABLE_FEEDBACK = (
+ os.getenv("AZURE_COSMOSDB_ENABLE_FEEDBACK", "false").lower() == "true"
+ )
+ self.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION = self.get_env_var_bool(
+ "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION", "False"
+ )
+ self.USE_ADVANCED_IMAGE_PROCESSING = self.get_env_var_bool(
+ "USE_ADVANCED_IMAGE_PROCESSING", "False"
+ )
+ # PostgreSQL configuration
+ elif self.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
+ self.AZURE_POSTGRES_SEARCH_TOP_K = self.get_env_var_int(
+ "AZURE_POSTGRES_SEARCH_TOP_K", 5
+ )
+ azure_postgresql_info = self.get_info_from_env("AZURE_POSTGRESQL_INFO", "")
+ self.POSTGRESQL_USER = azure_postgresql_info.get("user", "")
+ self.POSTGRESQL_DATABASE = azure_postgresql_info.get("dbname", "")
+ self.POSTGRESQL_HOST = azure_postgresql_info.get("host", "")
+ # Ensure integrated vectorization is disabled for PostgreSQL
+ self.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION = False
+ self.USE_ADVANCED_IMAGE_PROCESSING = False
+ else:
+ raise ValueError(
+ "Unsupported DATABASE_TYPE. Please set DATABASE_TYPE to 'CosmosDB' or 'PostgreSQL'."
+ )
self.AZURE_AUTH_TYPE = os.getenv("AZURE_AUTH_TYPE", "keys")
# Azure OpenAI
@@ -146,9 +186,6 @@ def __load_config(self, **kwargs) -> None:
self.AZURE_TOKEN_PROVIDER = get_bearer_token_provider(
DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)
- self.USE_ADVANCED_IMAGE_PROCESSING = self.get_env_var_bool(
- "USE_ADVANCED_IMAGE_PROCESSING", "False"
- )
self.ADVANCED_IMAGE_PROCESSING_MAX_IMAGES = self.get_env_var_int(
"ADVANCED_IMAGE_PROCESSING_MAX_IMAGES", 1
)
@@ -205,22 +242,51 @@ def __load_config(self, **kwargs) -> None:
"DOCUMENT_PROCESSING_QUEUE_NAME", "doc-processing"
)
# Azure Blob Storage
- self.AZURE_BLOB_ACCOUNT_NAME = os.getenv("AZURE_BLOB_ACCOUNT_NAME", "")
- self.AZURE_BLOB_ACCOUNT_KEY = self.secretHelper.get_secret(
- "AZURE_BLOB_ACCOUNT_KEY"
- )
- self.AZURE_BLOB_CONTAINER_NAME = os.getenv("AZURE_BLOB_CONTAINER_NAME", "")
+ azure_blob_storage_info = self.get_info_from_env("AZURE_BLOB_STORAGE_INFO", "")
+ if azure_blob_storage_info:
+ # If AZURE_BLOB_STORAGE_INFO exists
+ self.AZURE_BLOB_ACCOUNT_NAME = azure_blob_storage_info.get(
+ "accountName", ""
+ )
+ self.AZURE_BLOB_ACCOUNT_KEY = self.secretHelper.get_secret_from_json(
+ azure_blob_storage_info.get("accountKey", "")
+ )
+ self.AZURE_BLOB_CONTAINER_NAME = azure_blob_storage_info.get(
+ "containerName", ""
+ )
+ else:
+ # Otherwise, fallback to individual environment variables
+ self.AZURE_BLOB_ACCOUNT_NAME = os.getenv("AZURE_BLOB_ACCOUNT_NAME", "")
+ self.AZURE_BLOB_ACCOUNT_KEY = self.secretHelper.get_secret(
+ "AZURE_BLOB_ACCOUNT_KEY"
+ )
+ self.AZURE_BLOB_CONTAINER_NAME = os.getenv("AZURE_BLOB_CONTAINER_NAME", "")
self.AZURE_STORAGE_ACCOUNT_ENDPOINT = os.getenv(
"AZURE_STORAGE_ACCOUNT_ENDPOINT",
f"https://{self.AZURE_BLOB_ACCOUNT_NAME}.blob.core.windows.net/",
)
+
# Azure Form Recognizer
- self.AZURE_FORM_RECOGNIZER_ENDPOINT = os.getenv(
- "AZURE_FORM_RECOGNIZER_ENDPOINT", ""
- )
- self.AZURE_FORM_RECOGNIZER_KEY = self.secretHelper.get_secret(
- "AZURE_FORM_RECOGNIZER_KEY"
+ azure_form_recognizer_info = self.get_info_from_env(
+ "AZURE_FORM_RECOGNIZER_INFO", ""
)
+ if azure_form_recognizer_info:
+ # If AZURE_FORM_RECOGNIZER_INFO exists
+ self.AZURE_FORM_RECOGNIZER_ENDPOINT = azure_form_recognizer_info.get(
+ "endpoint", ""
+ )
+ self.AZURE_FORM_RECOGNIZER_KEY = self.secretHelper.get_secret_from_json(
+ azure_form_recognizer_info.get("key", "")
+ )
+ else:
+ # Otherwise, fallback to individual environment variables
+ self.AZURE_FORM_RECOGNIZER_ENDPOINT = os.getenv(
+ "AZURE_FORM_RECOGNIZER_ENDPOINT", ""
+ )
+ self.AZURE_FORM_RECOGNIZER_KEY = self.secretHelper.get_secret(
+ "AZURE_FORM_RECOGNIZER_KEY"
+ )
+
# Azure App Insights
# APPLICATIONINSIGHTS_ENABLED will be True when the application runs in App Service
self.APPLICATIONINSIGHTS_ENABLED = self.get_env_var_bool(
@@ -264,23 +330,6 @@ def __load_config(self, **kwargs) -> None:
self.PROMPT_FLOW_DEPLOYMENT_NAME = os.getenv("PROMPT_FLOW_DEPLOYMENT_NAME", "")
- # Chat History CosmosDB Integration Settings
- azure_cosmosdb_info = self.get_info_from_env("AZURE_COSMOSDB_INFO", "")
- self.AZURE_COSMOSDB_DATABASE = azure_cosmosdb_info.get("databaseName", "")
- self.AZURE_COSMOSDB_ACCOUNT = azure_cosmosdb_info.get("accountName", "")
- self.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER = azure_cosmosdb_info.get(
- "containerName", ""
- )
- self.AZURE_COSMOSDB_ACCOUNT_KEY = self.secretHelper.get_secret(
- "AZURE_COSMOSDB_ACCOUNT_KEY"
- )
- self.AZURE_COSMOSDB_ENABLE_FEEDBACK = (
- os.getenv("AZURE_COSMOSDB_ENABLE_FEEDBACK", "false").lower() == "true"
- )
- self.CHAT_HISTORY_ENABLED = self.get_env_var_bool(
- "CHAT_HISTORY_ENABLED", "true"
- )
-
def is_chat_model(self):
if "gpt-4" in self.AZURE_OPENAI_MODEL_NAME.lower():
return True
@@ -363,3 +412,10 @@ def get_secret(self, secret_name: str) -> str:
if self.USE_KEY_VAULT and secret_name_value
else os.getenv(secret_name, "")
)
+
+ def get_secret_from_json(self, secret_name: str) -> str:
+ return (
+ self.secret_client.get_secret(secret_name).value
+ if self.USE_KEY_VAULT and secret_name
+ else secret_name
+ )
diff --git a/code/backend/batch/utilities/orchestrator/orchestrator_base.py b/code/backend/batch/utilities/orchestrator/orchestrator_base.py
index 1073b9ec0..15539e305 100644
--- a/code/backend/batch/utilities/orchestrator/orchestrator_base.py
+++ b/code/backend/batch/utilities/orchestrator/orchestrator_base.py
@@ -70,7 +70,7 @@ async def handle_message(
**kwargs: Optional[dict],
) -> dict:
result = await self.orchestrate(user_message, chat_history, **kwargs)
- if self.config.logging.log_tokens:
+ if str(self.config.logging.log_tokens).lower() == "true":
custom_dimensions = {
"conversation_id": conversation_id,
"message_id": self.message_id,
@@ -79,7 +79,7 @@ async def handle_message(
"total_tokens": self.tokens["total"],
}
logger.info("Token Consumption", extra=custom_dimensions)
- if self.config.logging.log_user_interactions:
+ if str(self.config.logging.log_user_interactions).lower() == "true":
self.conversation_logger.log(
messages=[
{
diff --git a/code/backend/batch/utilities/parser/output_parser_tool.py b/code/backend/batch/utilities/parser/output_parser_tool.py
index 4455ac20b..ac326353f 100644
--- a/code/backend/batch/utilities/parser/output_parser_tool.py
+++ b/code/backend/batch/utilities/parser/output_parser_tool.py
@@ -20,17 +20,11 @@ def _get_source_docs_from_answer(self, answer):
results = re.findall(r"\[doc(\d+)\]", answer)
return [int(i) for i in results]
- def _replace_last(self, text, old, new):
- """Replaces the last occurence of a substring in a string
-
- This is done by reversing the string using [::-1], replacing the first occurence of the reversed substring, and
- reversing the string again.
- """
- return (text[::-1].replace(old[::-1], new[::-1], 1))[::-1]
-
- def _make_doc_references_sequential(self, answer, doc_ids):
- for i, idx in enumerate(doc_ids):
- answer = self._replace_last(answer, f"[doc{idx}]", f"[doc{i+1}]")
+    def _make_doc_references_sequential(self, answer):
+        # Renumber citations left to right. Use re.sub rather than slicing at
+        # pre-computed match spans: spans taken from the original string go
+        # stale as soon as a replacement changes the length (e.g. "[doc12]"
+        # -> "[doc3]"), which would corrupt every later replacement.
+        counter = iter(range(1, len(re.findall(r"\[doc\d+\]", answer)) + 1))
+        return re.sub(r"\[doc\d+\]", lambda m: f"[doc{next(counter)}]", answer)
def parse(
@@ -42,7 +36,7 @@ def parse(
) -> List[dict]:
answer = self._clean_up_answer(answer)
doc_ids = self._get_source_docs_from_answer(answer)
- answer = self._make_doc_references_sequential(answer, doc_ids)
+ answer = self._make_doc_references_sequential(answer)
# create return message object
messages = [
diff --git a/code/backend/batch/utilities/search/postgres_search_handler.py b/code/backend/batch/utilities/search/postgres_search_handler.py
new file mode 100644
index 000000000..0671a16d2
--- /dev/null
+++ b/code/backend/batch/utilities/search/postgres_search_handler.py
@@ -0,0 +1,104 @@
+import json
+from typing import List
+import numpy as np
+
+from .search_handler_base import SearchHandlerBase
+from ..helpers.azure_postgres_helper import AzurePostgresHelper
+from ..common.source_document import SourceDocument
+
+
+class AzurePostgresHandler(SearchHandlerBase):
+
+ def __init__(self, env_helper):
+ self.azure_postgres_helper = AzurePostgresHelper()
+ super().__init__(env_helper)
+
+ def query_search(self, question) -> List[SourceDocument]:
+ user_input = question
+ query_embedding = self.azure_postgres_helper.llm_helper.generate_embeddings(
+ user_input
+ )
+
+ embedding_array = np.array(query_embedding).tolist()
+
+ search_results = self.azure_postgres_helper.get_vector_store(embedding_array)
+
+ return self._convert_to_source_documents(search_results)
+
+ def _convert_to_source_documents(self, search_results) -> List[SourceDocument]:
+ source_documents = []
+ for source in search_results:
+ source_documents.append(
+ SourceDocument(
+ id=source["id"],
+ title=source["title"],
+ chunk=source["chunk"],
+ offset=source["offset"],
+ page_number=source["page_number"],
+ content=source["content"],
+ source=source["source"],
+ )
+ )
+ return source_documents
+
+ def create_search_client(self):
+ return self.azure_postgres_helper.get_search_client()
+
+ def create_vector_store(self, documents_to_upload):
+ return self.azure_postgres_helper.create_vector_store(documents_to_upload)
+
+ def perform_search(self, filename):
+ return self.azure_postgres_helper.perform_search(filename)
+
+ def process_results(self, results):
+ if results is None:
+ return []
+ data = [
+ [json.loads(result["metadata"]).get("chunk", i), result["content"]]
+ for i, result in enumerate(results)
+ ]
+ return data
+
+ def get_files(self):
+ results = self.azure_postgres_helper.get_files()
+ if results is None or len(results) == 0:
+ return []
+ return results
+
+ def output_results(self, results):
+ files = {}
+ for result in results:
+ id = result["id"]
+ filename = result["title"]
+ if filename in files:
+ files[filename].append(id)
+ else:
+ files[filename] = [id]
+
+ return files
+
+ def delete_files(self, files):
+ ids_to_delete = []
+ files_to_delete = []
+
+ for filename, ids in files.items():
+ files_to_delete.append(filename)
+ ids_to_delete += [{"id": id} for id in ids]
+ self.azure_postgres_helper.delete_documents(ids_to_delete)
+
+ return ", ".join(files_to_delete)
+
+ def search_by_blob_url(self, blob_url):
+ return self.azure_postgres_helper.search_by_blob_url(blob_url)
+
+ def delete_from_index(self, blob_url) -> None:
+ documents = self.search_by_blob_url(blob_url)
+ if documents is None or len(documents) == 0:
+ return
+ files_to_delete = self.output_results(documents)
+ self.delete_files(files_to_delete)
+
+ def get_unique_files(self):
+ results = self.azure_postgres_helper.get_unique_files()
+ unique_titles = [row["title"] for row in results]
+ return unique_titles
diff --git a/code/backend/batch/utilities/search/search.py b/code/backend/batch/utilities/search/search.py
index 6a5eed95e..d1a746a06 100644
--- a/code/backend/batch/utilities/search/search.py
+++ b/code/backend/batch/utilities/search/search.py
@@ -1,3 +1,5 @@
+from ..search.postgres_search_handler import AzurePostgresHandler
+from ..helpers.config.database_type import DatabaseType
from ..search.azure_search_handler import AzureSearchHandler
from ..search.integrated_vectorization_search_handler import (
IntegratedVectorizationSearchHandler,
@@ -10,10 +12,14 @@
class Search:
@staticmethod
def get_search_handler(env_helper: EnvHelper) -> SearchHandlerBase:
- if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
- return IntegratedVectorizationSearchHandler(env_helper)
+        # TODO: the PostgreSQL indexing workflow is not yet complete; to fall back to Azure AI Search, comment out the DatabaseType.POSTGRESQL branch below.
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
+ return AzurePostgresHandler(env_helper)
else:
- return AzureSearchHandler(env_helper)
+ if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
+ return IntegratedVectorizationSearchHandler(env_helper)
+ else:
+ return AzureSearchHandler(env_helper)
@staticmethod
def get_source_documents(
diff --git a/code/backend/pages/02_Explore_Data.py b/code/backend/pages/02_Explore_Data.py
index 73ffde955..0d71ed47b 100644
--- a/code/backend/pages/02_Explore_Data.py
+++ b/code/backend/pages/02_Explore_Data.py
@@ -4,6 +4,7 @@
import sys
import pandas as pd
from batch.utilities.helpers.env_helper import EnvHelper
+from batch.utilities.helpers.config.database_type import DatabaseType
from batch.utilities.search.search import Search
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
@@ -40,8 +41,17 @@ def load_css(file_path):
try:
search_handler = Search.get_search_handler(env_helper)
- results = search_handler.search_with_facets("*", "title", facet_count=0)
- unique_files = search_handler.get_unique_files(results, "title")
+ # Determine unique files based on database type
+ if env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value:
+ unique_files = search_handler.get_unique_files()
+ elif env_helper.DATABASE_TYPE == DatabaseType.COSMOSDB.value:
+ results = search_handler.search_with_facets("*", "title", facet_count=0)
+ unique_files = search_handler.get_unique_files(results, "title")
+ else:
+ raise ValueError(
+ "Unsupported database type. Only 'PostgreSQL' and 'CosmosDB' are allowed."
+ )
+
filename = st.selectbox("Select your file:", unique_files)
st.write("Showing chunks for:", filename)
diff --git a/code/backend/pages/03_Delete_Data.py b/code/backend/pages/03_Delete_Data.py
index b92cf303c..c681ac411 100644
--- a/code/backend/pages/03_Delete_Data.py
+++ b/code/backend/pages/03_Delete_Data.py
@@ -5,6 +5,7 @@
import logging
from batch.utilities.helpers.env_helper import EnvHelper
from batch.utilities.search.search import Search
+from batch.utilities.helpers.config.database_type import DatabaseType
from batch.utilities.helpers.azure_blob_storage_client import AzureBlobStorageClient
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
@@ -46,7 +47,10 @@ def load_css(file_path):
search_handler = Search.get_search_handler(env_helper)
results = search_handler.get_files()
- if results is None or results.get_count() == 0:
+ if (
+ env_helper.DATABASE_TYPE == DatabaseType.COSMOSDB.value
+ and (results is None or results.get_count() == 0)
+ ) or (env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value and len(results) == 0):
st.info("No files to delete")
st.stop()
else:
diff --git a/code/backend/pages/04_Configuration.py b/code/backend/pages/04_Configuration.py
index 1ac80215e..c41d17aa5 100644
--- a/code/backend/pages/04_Configuration.py
+++ b/code/backend/pages/04_Configuration.py
@@ -8,6 +8,7 @@
from azure.core.exceptions import ResourceNotFoundError
from batch.utilities.helpers.config.assistant_strategy import AssistantStrategy
from batch.utilities.helpers.config.conversation_flow import ConversationFlow
+from batch.utilities.helpers.config.database_type import DatabaseType
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
env_helper: EnvHelper = EnvHelper()
@@ -58,10 +59,11 @@ def load_css(file_path):
if "example_answer" not in st.session_state:
st.session_state["example_answer"] = config.example.answer
if "log_user_interactions" not in st.session_state:
- st.session_state["log_user_interactions"] = config.logging.log_user_interactions
+ st.session_state["log_user_interactions"] = (
+ str(config.logging.log_user_interactions).lower() == "true"
+ )
if "log_tokens" not in st.session_state:
- st.session_state["log_tokens"] = config.logging.log_tokens
-
+ st.session_state["log_tokens"] = str(config.logging.log_tokens).lower() == "true"
if "orchestrator_strategy" not in st.session_state:
st.session_state["orchestrator_strategy"] = config.orchestrator.strategy.value
if "ai_assistant_type" not in st.session_state:
@@ -69,13 +71,11 @@ def load_css(file_path):
if "conversational_flow" not in st.session_state:
st.session_state["conversational_flow"] = config.prompts.conversational_flow
if "enable_chat_history" not in st.session_state:
- st.session_state["enable_chat_history"] = st.session_state[
- "enable_chat_history"
- ] = (
- config.enable_chat_history.lower() == "true"
- if isinstance(config.enable_chat_history, str)
- else config.enable_chat_history
+ st.session_state["enable_chat_history"] = (
+ str(config.enable_chat_history).lower() == "true"
)
+if "database_type" not in st.session_state:
+ st.session_state["database_type"] = config.database_type
if env_helper.AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION:
if "max_page_length" not in st.session_state:
@@ -196,6 +196,11 @@ def validate_documents():
key="conversational_flow",
options=config.get_available_conversational_flows(),
help=conversational_flow_help,
+            # PostgreSQL deployments support only the custom flow, so the
+            # selector is locked; the comparison itself is already a bool.
+            disabled=(
+                env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
+            ),
)
with st.expander("Orchestrator configuration", expanded=True):
@@ -209,6 +214,7 @@ def validate_documents():
True
if st.session_state["conversational_flow"]
== ConversationFlow.BYOD.value
+                    or env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
else False
),
)
@@ -384,11 +390,21 @@ def validate_documents():
st.checkbox("Enable chat history", key="enable_chat_history")
with st.expander("Logging configuration", expanded=True):
+        # Logging options are fixed for PostgreSQL deployments, so both
+        # checkboxes below are disabled in that case.
+        disable_checkboxes = (
+            env_helper.DATABASE_TYPE == DatabaseType.POSTGRESQL.value
+        )
st.checkbox(
"Log user input and output (questions, answers, conversation history, sources)",
key="log_user_interactions",
+ disabled=disable_checkboxes,
+ )
+ st.checkbox(
+ "Log tokens",
+ key="log_tokens",
+ disabled=disable_checkboxes,
)
- st.checkbox("Log tokens", key="log_tokens")
if st.form_submit_button("Save configuration"):
document_processors = []
diff --git a/code/frontend/src/components/Answer/AnswerParser.tsx b/code/frontend/src/components/Answer/AnswerParser.tsx
index 57dd791a0..44cbedf03 100644
--- a/code/frontend/src/components/Answer/AnswerParser.tsx
+++ b/code/frontend/src/components/Answer/AnswerParser.tsx
@@ -11,7 +11,7 @@ let filteredCitations = [] as Citation[];
// Define a function to check if a citation with the same Chunk_Id already exists in filteredCitations
const isDuplicate = (citation: Citation,citationIndex:string) => {
- return filteredCitations.some((c) => c.chunk_id === citation.chunk_id) ;
+ return filteredCitations.some((c) => c.chunk_id === citation.chunk_id && c.id === citation.id) ;
};
export function parseAnswer(answer: AskResponse): ParsedAnswer {
@@ -28,12 +28,11 @@ export function parseAnswer(answer: AskResponse): ParsedAnswer {
let citation = cloneDeep(answer.citations[Number(citationIndex) - 1]) as Citation;
if (!isDuplicate(citation, citationIndex) && citation !== undefined) {
answerText = answerText.replaceAll(link, ` ^${++citationReindex}^ `);
- citation.id = citationIndex; // original doc index to de-dupe
citation.reindex_id = citationReindex.toString(); // reindex from 1 for display
filteredCitations.push(citation);
}else{
// Replacing duplicate citation with original index
- let matchingCitation = filteredCitations.find((ct) => citation.chunk_id == ct.chunk_id);
+ let matchingCitation = filteredCitations.find((ct) => citation.chunk_id === ct.chunk_id && citation.id === ct.id);
if (matchingCitation) {
answerText= answerText.replaceAll(link, ` ^${matchingCitation.reindex_id}^ `)
}
diff --git a/code/tests/functional/app_config.py b/code/tests/functional/app_config.py
index c4f2b6d8c..a072d7f92 100644
--- a/code/tests/functional/app_config.py
+++ b/code/tests/functional/app_config.py
@@ -5,6 +5,7 @@
from backend.batch.utilities.helpers.config.conversation_flow import ConversationFlow
logger = logging.getLogger(__name__)
+encoded_account_key = str(base64.b64encode(b"some-blob-account-key"), "utf-8")
class AppConfig:
@@ -12,17 +13,14 @@ class AppConfig:
config: dict[str, str | None] = {
"APPLICATIONINSIGHTS_ENABLED": "False",
"AZURE_AUTH_TYPE": "keys",
- "AZURE_BLOB_ACCOUNT_KEY": str(
- base64.b64encode(b"some-blob-account-key"), "utf-8"
- ),
- "AZURE_BLOB_ACCOUNT_NAME": "some-blob-account-name",
- "AZURE_BLOB_CONTAINER_NAME": "some-blob-container-name",
+ "AZURE_BLOB_STORAGE_INFO": '{"accountName": "some-blob-account-name", "containerName": "some-blob-container-name", "accountKey": "'
+ + encoded_account_key
+ + '"}',
"AZURE_COMPUTER_VISION_KEY": "some-computer-vision-key",
"AZURE_CONTENT_SAFETY_ENDPOINT": "some-content-safety-endpoint",
"AZURE_CONTENT_SAFETY_KEY": "some-content-safety-key",
"AZURE_FORM_RECOGNIZER_ENDPOINT": "some-form-recognizer-endpoint",
- "AZURE_FORM_RECOGNIZER_KEY": "some-form-recognizer-key",
- "AZURE_KEY_VAULT_ENDPOINT": "some-key-vault-endpoint",
+        "AZURE_FORM_RECOGNIZER_INFO": '{"endpoint":"some-form-recognizer-endpoint","key":"some-form-recognizer-key"}',
"AZURE_OPENAI_API_KEY": "some-azure-openai-api-key",
"AZURE_OPENAI_API_VERSION": "2024-02-01",
"AZURE_OPENAI_EMBEDDING_MODEL_INFO": '{"model":"some-embedding-model","modelName":"some-embedding-model-name","modelVersion":"some-embedding-model-version"}',
@@ -84,6 +82,7 @@ class AppConfig:
"OPENAI_API_TYPE": None,
"OPENAI_API_KEY": None,
"OPENAI_API_VERSION": None,
+ "DATABASE_TYPE": "CosmosDB",
}
def __init__(self, config_overrides: dict[str, str | None] = {}) -> None:
diff --git a/code/tests/functional/tests/backend_api/default/test_conversation.py b/code/tests/functional/tests/backend_api/default/test_conversation.py
index 8d7106f8c..34e90cf7b 100644
--- a/code/tests/functional/tests/backend_api/default/test_conversation.py
+++ b/code/tests/functional/tests/backend_api/default/test_conversation.py
@@ -328,7 +328,7 @@ def test_post_makes_correct_call_to_openai_chat_completions_with_functions(
)
-def test_post_makes_correct_call_to_list_search_indexes(
+def test_post_makes_correct_call_to_list_vector_store(
app_url: str, app_config: AppConfig, httpserver: HTTPServer
):
# when
diff --git a/code/tests/functional/tests/backend_api/integrated_vectorization_custom_conversation/test_iv_question_answer_tool.py b/code/tests/functional/tests/backend_api/integrated_vectorization_custom_conversation/test_iv_question_answer_tool.py
index 9d1eb152b..875c8363c 100644
--- a/code/tests/functional/tests/backend_api/integrated_vectorization_custom_conversation/test_iv_question_answer_tool.py
+++ b/code/tests/functional/tests/backend_api/integrated_vectorization_custom_conversation/test_iv_question_answer_tool.py
@@ -136,7 +136,7 @@ def test_post_makes_correct_call_to_get_conversation_log_search_index(
)
-def test_post_makes_correct_call_to_list_search_indexes(
+def test_post_makes_correct_call_to_list_vector_store(
app_url: str, app_config: AppConfig, httpserver: HTTPServer
):
# when
diff --git a/code/tests/functional/tests/functions/advanced_image_processing/test_advanced_image_processing.py b/code/tests/functional/tests/functions/advanced_image_processing/test_advanced_image_processing.py
index 31ecb697f..d500077b4 100644
--- a/code/tests/functional/tests/functions/advanced_image_processing/test_advanced_image_processing.py
+++ b/code/tests/functional/tests/functions/advanced_image_processing/test_advanced_image_processing.py
@@ -26,7 +26,7 @@ def message(app_config: AppConfig):
body=json.dumps(
{
"topic": "topic",
- "subject": f"/blobServices/default/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/documents/blobs/{FILE_NAME}",
+ "subject": f"/blobServices/default/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/documents/blobs/{FILE_NAME}",
"eventType": "Microsoft.Storage.BlobCreated",
"id": "id",
"data": {
@@ -37,7 +37,7 @@ def message(app_config: AppConfig):
"contentType": "image/jpeg",
"contentLength": 115310,
"blobType": "BlockBlob",
- "url": f"https://{app_config.get('AZURE_BLOB_ACCOUNT_NAME')}.blob.core.windows.net/documents/{FILE_NAME}",
+ "url": f"https://{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountName')}.blob.core.windows.net/documents/{FILE_NAME}",
"sequencer": "00000000000000000000000000005E450000000000001f49",
"storageDiagnostics": {
"batchId": "952bdc2e-6006-0000-00bb-a20860000000"
@@ -54,12 +54,12 @@ def message(app_config: AppConfig):
@pytest.fixture(autouse=True)
def setup_blob_metadata_mocking(httpserver: HTTPServer, app_config: AppConfig):
httpserver.expect_request(
- f"/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}",
+ f"/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}",
method="HEAD",
).respond_with_data()
httpserver.expect_request(
- f"/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}",
+ f"/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}",
method="PUT",
).respond_with_data()
@@ -141,7 +141,7 @@ def test_image_passed_to_computer_vision_to_generate_image_embeddings(
)[0]
assert request.get_json()["url"].startswith(
- f"{app_config.get('AZURE_STORAGE_ACCOUNT_ENDPOINT')}{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}"
+ f"{app_config.get('AZURE_STORAGE_ACCOUNT_ENDPOINT')}{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}"
)
@@ -195,7 +195,7 @@ def test_image_passed_to_llm_to_generate_caption(
assert request.get_json()["messages"][1]["content"][1]["image_url"][
"url"
].startswith(
- f"{app_config.get('AZURE_STORAGE_ACCOUNT_ENDPOINT')}{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}"
+ f"{app_config.get('AZURE_STORAGE_ACCOUNT_ENDPOINT')}{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}"
)
@@ -240,7 +240,7 @@ def test_metadata_is_updated_after_processing(
verify_request_made(
mock_httpserver=httpserver,
request_matcher=RequestMatcher(
- path=f"/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}",
+ path=f"/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}",
method="PUT",
headers={
"Authorization": ANY,
@@ -255,7 +255,7 @@ def test_metadata_is_updated_after_processing(
)
-def test_makes_correct_call_to_list_search_indexes(
+def test_makes_correct_call_to_list_vector_store(
message: QueueMessage, httpserver: HTTPServer, app_config: AppConfig
):
# when
@@ -439,7 +439,7 @@ def test_makes_correct_call_to_store_documents_in_search_index(
batch_push_results.build().get_user_function()(message)
# then
- expected_file_path = f"{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}"
+ expected_file_path = f"{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}"
expected_source_url = (
f"{app_config.get('AZURE_STORAGE_ACCOUNT_ENDPOINT')}{expected_file_path}"
)
diff --git a/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py b/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py
index ed374b181..32be05562 100644
--- a/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py
+++ b/code/tests/functional/tests/functions/integrated_vectorization/test_integrated_vectorization_resource_creation.py
@@ -20,12 +20,12 @@
@pytest.fixture(autouse=True)
def setup_blob_metadata_mocking(httpserver: HTTPServer, app_config: AppConfig):
httpserver.expect_request(
- f"/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}",
+ f"/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}",
method="HEAD",
).respond_with_data()
httpserver.expect_request(
- f"/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/{FILE_NAME}",
+ f"/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/{FILE_NAME}",
method="PUT",
).respond_with_data()
@@ -36,7 +36,7 @@ def message(app_config: AppConfig):
body=json.dumps(
{
"topic": "topic",
- "subject": f"/blobServices/default/{app_config.get('AZURE_BLOB_CONTAINER_NAME')}/documents/blobs/{FILE_NAME}",
+ "subject": f"/blobServices/default/{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}/documents/blobs/{FILE_NAME}",
"eventType": "Microsoft.Storage.BlobCreated",
"id": "id",
"data": {
@@ -47,7 +47,7 @@ def message(app_config: AppConfig):
"contentType": "application/pdf",
"contentLength": 544811,
"blobType": "BlockBlob",
- "url": f"https://{app_config.get('AZURE_BLOB_ACCOUNT_NAME')}.blob.core.windows.net/documents/{FILE_NAME}",
+ "url": f"https://{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountName')}.blob.core.windows.net/documents/{FILE_NAME}",
"sequencer": "00000000000000000000000000036029000000000017251c",
"storageDiagnostics": {
"batchId": "c98008b9-e006-007c-00bb-a2ae9f000000"
@@ -97,9 +97,9 @@ def test_integrated_vectorization_datasouce_created(
"name": app_config.get("AZURE_SEARCH_DATASOURCE_NAME"),
"type": "azureblob",
"credentials": {
- "connectionString": f"DefaultEndpointsProtocol=https;AccountName={app_config.get('AZURE_BLOB_ACCOUNT_NAME')};AccountKey={app_config.get('AZURE_BLOB_ACCOUNT_KEY')};EndpointSuffix=core.windows.net"
+ "connectionString": f"DefaultEndpointsProtocol=https;AccountName={app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountName')};AccountKey={app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','accountKey')};EndpointSuffix=core.windows.net"
},
- "container": {"name": f"{app_config.get('AZURE_BLOB_CONTAINER_NAME')}"},
+ "container": {"name": f"{app_config.get_from_json('AZURE_BLOB_STORAGE_INFO','containerName')}"},
"dataDeletionDetectionPolicy": {
"@odata.type": "#Microsoft.Azure.Search.NativeBlobSoftDeleteDeletionDetectionPolicy"
},
diff --git a/code/tests/search_utilities/test_postgres_search_handler.py b/code/tests/search_utilities/test_postgres_search_handler.py
new file mode 100644
index 000000000..65811058d
--- /dev/null
+++ b/code/tests/search_utilities/test_postgres_search_handler.py
@@ -0,0 +1,218 @@
+import json
+import pytest
+from unittest.mock import MagicMock, patch
+from backend.batch.utilities.common.source_document import SourceDocument
+from backend.batch.utilities.search.postgres_search_handler import AzurePostgresHandler
+
+
+@pytest.fixture(autouse=True)
+def env_helper_mock():
+ mock = MagicMock()
+ mock.POSTGRESQL_USER = "test_user"
+ mock.POSTGRESQL_PASSWORD = "test_password"
+ mock.POSTGRESQL_HOST = "test_host"
+ mock.POSTGRESQL_DB = "test_db"
+ return mock
+
+
+@pytest.fixture(autouse=True)
+def mock_search_client():
+ with patch(
+ "backend.batch.utilities.search.postgres_search_handler.AzurePostgresHelper"
+ ) as mock:
+ search_client = mock.return_value.get_search_client.return_value
+ yield search_client
+
+
+@pytest.fixture
+def handler(env_helper_mock, mock_search_client):
+    # The autouse ``mock_search_client`` fixture already patches
+    # AzurePostgresHelper in the module under test, so the handler
+    # constructed here automatically receives the mocked helper;
+    # no additional ``patch`` context is needed.
+    return AzurePostgresHandler(env_helper_mock)
+
+
+def test_query_search(handler, mock_search_client):
+ mock_llm_helper = MagicMock()
+ mock_search_client.llm_helper = mock_llm_helper
+
+ mock_llm_helper.generate_embeddings.return_value = [1, 2, 3]
+
+ mock_search_client.get_vector_store.return_value = [
+ {
+ "id": "1",
+ "title": "Title1",
+ "chunk": "Chunk1",
+ "offset": 0,
+ "page_number": 1,
+ "content": "Content1",
+ "source": "Source1",
+ },
+ {
+ "id": "2",
+ "title": "Title2",
+ "chunk": "Chunk2",
+ "offset": 1,
+ "page_number": 2,
+ "content": "Content2",
+ "source": "Source2",
+ },
+ ]
+
+ mock_search_client.get_search_client.return_value = mock_search_client
+ handler.azure_postgres_helper = mock_search_client
+
+ result = handler.query_search("Sample question")
+
+ mock_llm_helper.generate_embeddings.assert_called_once_with("Sample question")
+ mock_search_client.get_vector_store.assert_called_once()
+ assert len(result) == 2
+ assert isinstance(result[0], SourceDocument)
+ assert result[0].id == "1"
+ assert result[0].title == "Title1"
+ assert result[1].content == "Content2"
+
+
+def test_convert_to_source_documents(handler):
+ search_results = [
+ {
+ "id": "1",
+ "title": "Title1",
+ "chunk": "Chunk1",
+ "offset": 0,
+ "page_number": 1,
+ "content": "Content1",
+ "source": "Source1",
+ },
+ {
+ "id": "2",
+ "title": "Title2",
+ "chunk": "Chunk2",
+ "offset": 1,
+ "page_number": 2,
+ "content": "Content2",
+ "source": "Source2",
+ },
+ ]
+
+ result = handler._convert_to_source_documents(search_results)
+
+ assert len(result) == 2
+ assert result[0].id == "1"
+ assert result[0].content == "Content1"
+ assert result[1].page_number == 2
+
+
+def test_create_search_client(handler, mock_search_client):
+ handler.azure_postgres_helper.get_search_client = MagicMock(
+ return_value=mock_search_client
+ )
+
+ result = handler.create_search_client()
+
+ assert result == mock_search_client
+
+
+def test_get_files(handler):
+ mock_get_files = MagicMock(return_value=["test1.txt", "test2.txt"])
+ handler.azure_postgres_helper.get_files = mock_get_files
+
+ result = handler.get_files()
+
+ assert len(result) == 2
+ assert result[0] == "test1.txt"
+ assert result[1] == "test2.txt"
+
+
+def test_output_results(handler):
+ results = [
+ {"id": "1", "title": "file1.txt"},
+ {"id": "2", "title": "file2.txt"},
+ {"id": "3", "title": "file1.txt"},
+ {"id": "4", "title": "file3.txt"},
+ {"id": "5", "title": "file2.txt"},
+ ]
+
+ expected_output = {
+ "file1.txt": ["1", "3"],
+ "file2.txt": ["2", "5"],
+ "file3.txt": ["4"],
+ }
+
+ result = handler.output_results(results)
+
+ assert result == expected_output
+ assert len(result) == 3
+ assert "file1.txt" in result
+ assert result["file2.txt"] == ["2", "5"]
+
+
+def test_process_results(handler):
+ results = [
+ {"metadata": json.dumps({"chunk": "Chunk1"}), "content": "Content1"},
+ {"metadata": json.dumps({"chunk": "Chunk2"}), "content": "Content2"},
+ ]
+ expected_output = [["Chunk1", "Content1"], ["Chunk2", "Content2"]]
+ result = handler.process_results(results)
+ assert result == expected_output
+
+
+def test_process_results_none(handler):
+ result = handler.process_results(None)
+ assert result == []
+
+
+def test_process_results_missing_chunk(handler):
+ results = [
+ {"metadata": json.dumps({}), "content": "Content1"},
+ {"metadata": json.dumps({"chunk": "Chunk2"}), "content": "Content2"},
+ ]
+ expected_output = [[0, "Content1"], ["Chunk2", "Content2"]]
+ result = handler.process_results(results)
+ assert result == expected_output
+
+
+def test_delete_files(handler):
+ files_to_delete = {"test1.txt": [1, 2], "test2.txt": [3]}
+ mock_delete_documents = MagicMock()
+ handler.azure_postgres_helper.delete_documents = mock_delete_documents
+
+ result = handler.delete_files(files_to_delete)
+
+ mock_delete_documents.assert_called_once_with([{"id": 1}, {"id": 2}, {"id": 3}])
+ assert "test1.txt" in result
+
+
+# Test case for delete_from_index method
+def test_delete_from_index(handler):
+ blob_url = "https://example.com/blob"
+
+ # Mocking methods
+ mock_search_by_blob_url = MagicMock(return_value=[{"id": "1", "title": "Title1"}])
+ mock_output_results = MagicMock(return_value={"test1.txt": ["1"]})
+ mock_delete_files = MagicMock(return_value="test1.txt")
+
+ handler.search_by_blob_url = mock_search_by_blob_url
+ handler.output_results = mock_output_results
+ handler.delete_files = mock_delete_files
+
+ handler.delete_from_index(blob_url)
+
+ mock_search_by_blob_url.assert_called_once_with(blob_url)
+ mock_output_results.assert_called_once()
+ mock_delete_files.assert_called_once_with({"test1.txt": ["1"]})
+
+
+# Test case for get_unique_files method
+def test_get_unique_files(handler):
+ mock_get_unique_files = MagicMock(
+ return_value=[{"title": "test1.txt"}, {"title": "test2.txt"}]
+ )
+ handler.azure_postgres_helper.get_unique_files = mock_get_unique_files
+
+ result = handler.get_unique_files()
+
+ assert len(result) == 2
+ assert result[0] == "test1.txt"
+ assert result[1] == "test2.txt"
diff --git a/code/tests/test_chat_history.py b/code/tests/test_chat_history.py
new file mode 100644
index 000000000..6ef805d50
--- /dev/null
+++ b/code/tests/test_chat_history.py
@@ -0,0 +1,708 @@
+"""
+This module tests the chat history API endpoints of the application.
+"""
+
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from create_app import create_app
+
+
+@pytest.fixture
+def client():
+ """Create a test client for the app."""
+ app = create_app()
+ app.testing = True
+ return app.test_client()
+
+
+@pytest.fixture
+def mock_conversation_client():
+ """Mock the database client."""
+ with patch(
+ "backend.batch.utilities.chat_history.database_factory.DatabaseFactory.get_conversation_client"
+ ) as mock:
+ mock_conversation_client = AsyncMock()
+ mock.return_value = mock_conversation_client
+ yield mock_conversation_client
+
+
+class TestListConversations:
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_list_conversations_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the list_conversations endpoint works when everything is set up correctly."""
+ # Given
+ get_active_config_or_default_mock.return_value.prompts.conversational_flow = (
+ "custom"
+ )
+        get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversations = AsyncMock(
+ return_value=[{"conversation_id": "1", "content": "Hello, world!"}]
+ )
+
+ # When
+ response = client.get("/api/history/list?offset=0")
+
+ # Then
+ assert response.status_code == 200
+ assert response.json == [{"conversation_id": "1", "content": "Hello, world!"}]
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_list_conversations_no_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the list_conversations endpoint returns an error if chat history is not enabled."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+
+ # When
+ response = client.get("/api/history/list?offset=0")
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_list_conversations_db_error(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the list_conversations endpoint returns an error if the database is not available."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversations = AsyncMock(
+ side_effect=Exception("Database error")
+ )
+
+ # When
+ response = client.get("/api/history/list?offset=0")
+
+ # Then
+ assert response.status_code == 500
+ assert response.json == {
+ "error": "Error while listing historical conversations"
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_list_conversations_no_conversations(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the list_conversations endpoint returns an error if no conversations are found."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversations = AsyncMock(
+ return_value="invalid response"
+ )
+
+ # When
+ response = client.get("/api/history/list?offset=0")
+
+ # Then
+ assert response.status_code == 404
+ assert response.json == {
+ "error": "No conversations for 00000000-0000-0000-0000-000000000000 were found"
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_rename_conversation_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the rename_conversation endpoint works correctly."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+        mock_conversation_client.get_conversation = AsyncMock(
+ return_value={"conversation_id": "1", "title": "Old Title"}
+ )
+ mock_conversation_client.upsert_conversation = AsyncMock(
+ return_value={"conversation_id": "1", "title": "New Title"}
+ )
+
+ request_json = {"conversation_id": "1", "title": "New Title"}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 200
+ assert response.json == {"conversation_id": "1", "title": "New Title"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_rename_conversation_no_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the rename_conversation endpoint returns an error if chat history is not enabled."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+
+ request_json = {"conversation_id": "1", "title": "New Title"}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_rename_conversation_missing_conversation_id(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the rename_conversation endpoint returns an error if conversation_id is missing."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ request_json = {"title": "New Title"}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "conversation_id is required"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_rename_conversation_empty_title(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the rename_conversation endpoint returns an error if the title is empty."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ request_json = {"conversation_id": "1", "title": ""}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "A non-empty title is required"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ @patch(
+ "backend.batch.utilities.chat_history.database_factory.DatabaseFactory.get_conversation_client"
+ )
+ def test_rename_conversation_db_error(
+ self, mock_conversation_client, get_active_config_or_default_mock, client
+ ):
+ """Test that the rename_conversation endpoint returns an error if the database is not available."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.return_value.get_conversation = AsyncMock(
+ side_effect=Exception("Database error")
+ )
+
+ request_json = {"conversation_id": "1", "title": "New Title"}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 500
+ assert response.json == {"error": "Error while renaming conversation"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_rename_conversation_not_found(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the rename_conversation endpoint returns an error if the conversation is not found."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation = AsyncMock(return_value=None)
+
+ request_json = {"conversation_id": "1", "title": "New Title"}
+
+ # When
+ response = client.post("/api/history/rename", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {
+ "error": "Conversation 1 was not found. It either does not exist or the logged in user does not have access to it."
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_conversation_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the get_conversation endpoint works correctly."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation = AsyncMock(
+ return_value={"conversation_id": "1", "title": "Sample Conversation"}
+ )
+ mock_conversation_client.get_messages = AsyncMock(
+ return_value=[
+ {
+ "id": "1",
+ "role": "user",
+ "content": "Hello, world!",
+ "createdAt": "2024-11-29T12:00:00Z",
+ }
+ ]
+ )
+
+ request_json = {"conversation_id": "1"}
+
+ # When
+ response = client.post("/api/history/read", json=request_json)
+
+ # Then
+ assert response.status_code == 200
+ assert response.json == {
+ "conversation_id": "1",
+ "messages": [
+ {
+ "id": "1",
+ "role": "user",
+ "content": "Hello, world!",
+ "createdAt": "2024-11-29T12:00:00Z",
+ "feedback": None,
+ }
+ ],
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_conversation_no_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the get_conversation endpoint returns an error if chat history is not enabled."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+
+ request_json = {"conversation_id": "1"}
+
+ # When
+ response = client.post("/api/history/read", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_conversation_missing_conversation_id(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test that the get_conversation endpoint returns an error if conversation_id is missing."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ request_json = {}
+
+ # When
+ response = client.post("/api/history/read", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {"error": "conversation_id is required"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ @patch(
+ "backend.batch.utilities.chat_history.database_factory.DatabaseFactory.get_conversation_client"
+ )
+ def test_get_conversation_db_error(
+ self, mock_conversation_client, get_active_config_or_default_mock, client
+ ):
+ """Test that the get_conversation endpoint returns an error if the database is not available."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.return_value.get_conversation = AsyncMock(
+ side_effect=Exception("Database error")
+ )
+
+ request_json = {"conversation_id": "1"}
+
+ # When
+ response = client.post("/api/history/read", json=request_json)
+
+ # Then
+ assert response.status_code == 500
+ assert response.json == {"error": "Error while fetching conversation history"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_conversation_not_found(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the get_conversation endpoint returns an error if the conversation is not found."""
+ # Given
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation = AsyncMock(return_value=None)
+
+ request_json = {"conversation_id": "1"}
+
+ # When
+ response = client.post("/api/history/read", json=request_json)
+
+ # Then
+ assert response.status_code == 400
+ assert response.json == {
+ "error": "Conversation 1 was not found. It either does not exist or the logged in user does not have access to it."
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_conversation_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test that the delete_conversation endpoint works correctly."""
+
+ # Setup mocks
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ # Mock the database client
+ mock_conversation_client.delete_messages = AsyncMock(return_value=None)
+ mock_conversation_client.delete_conversation = AsyncMock(return_value=None)
+
+ # Define request data
+ request_json = {"conversation_id": "conv123"}
+
+ # Make DELETE request to delete the conversation
+ response = client.delete("/api/history/delete", json=request_json)
+
+ # Assert the response status and data
+ assert response.status_code == 200
+ assert response.json == {
+ "message": "Successfully deleted conversation and messages",
+ "conversation_id": "conv123",
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_conversation_no_chat_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test when chat history is not enabled in the configuration."""
+
+ # Setup mocks
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+
+ # Define request data
+ request_json = {"conversation_id": "conv123"}
+
+ # Make DELETE request to delete the conversation
+ response = client.delete("/api/history/delete", json=request_json)
+
+ # Assert the response status and error message
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_conversation_missing_conversation_id(
+ self, get_active_config_or_default_mock, client
+ ):
+ """Test when the conversation_id is missing in the request."""
+
+ # Setup mocks
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ # Define request data (missing conversation_id)
+ request_json = {}
+
+ # Make DELETE request to delete the conversation
+ response = client.delete("/api/history/delete", json=request_json)
+
+ # Assert the response status and error message
+ assert response.status_code == 400
+ assert response.json == {
+ "error": "Conversation None was not found. It either does not exist or the logged in user does not have access to it."
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_conversation_database_error(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test when the database client connection fails."""
+
+ # Setup mocks
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ # Mock a failure in the database client connection
+ mock_conversation_client.connect.side_effect = Exception(
+ "Database not available"
+ )
+
+ # Define request data
+ request_json = {"conversation_id": "conv123"}
+
+ # Make DELETE request to delete the conversation
+ response = client.delete("/api/history/delete", json=request_json)
+
+ # Assert the response status and error message
+ assert response.status_code == 500
+ assert response.json == {"error": "Error while deleting conversation history"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_conversation_internal_error(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ """Test when an unexpected internal error occurs during conversation deletion."""
+
+ # Setup mocks
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+
+ # Mock an unexpected error in the database client deletion
+ mock_conversation_client.delete_messages.side_effect = Exception(
+ "Unexpected error"
+ )
+
+ # Define request data
+ request_json = {"conversation_id": "conv123"}
+
+ # Make DELETE request to delete the conversation
+ response = client.delete("/api/history/delete", json=request_json)
+
+ # Assert the response status and error message
+ assert response.status_code == 500
+ assert response.json == {"error": "Error while deleting conversation history"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_all_conversations_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+        mock_conversation_client.get_conversations = AsyncMock(
+ return_value=[{"id": "conv1"}, {"id": "conv2"}]
+ )
+
+ response = client.delete("/api/history/delete_all")
+ assert response.status_code == 200
+ assert response.json == {
+ "message": "Successfully deleted all conversations and messages for user 00000000-0000-0000-0000-000000000000"
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_delete_all_conversations_no_chat_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+ response = client.delete("/api/history/delete_all")
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_update_conversation_success(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation.return_value = {
+ "title": "Test Title",
+ "updatedAt": "2024-12-01",
+ "id": "conv1",
+ }
+ mock_conversation_client.create_message.return_value = "success"
+ request_json = {
+ "conversation_id": "conv1",
+ "messages": [
+ {"role": "user", "content": "Hello"},
+ {"role": "assistant", "content": "Hi!"},
+ ],
+ }
+
+ # When
+ response = client.post("/api/history/update", json=request_json)
+
+ assert response.status_code == 200
+ assert response.json == {
+ "data": {
+ "conversation_id": "conv1",
+ "date": "2024-12-01",
+ "title": "Test Title",
+ },
+ "success": True,
+ }
+
+ @patch("backend.api.chat_history.AsyncAzureOpenAI")
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_update_conversation_new_success(
+ self,
+ get_active_config_or_default_mock,
+ azure_openai_mock: MagicMock,
+ mock_conversation_client,
+ client,
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation.return_value = []
+ mock_conversation_client.create_message.return_value = "success"
+ mock_conversation_client.create_conversation.return_value = {
+ "title": "Test Title",
+ "updatedAt": "2024-12-01",
+ "id": "conv1",
+ }
+ request_json = {
+ "conversation_id": "conv1",
+ "messages": [
+ {"role": "user", "content": "Hello"},
+ {"role": "assistant", "content": "Hi!"},
+ ],
+ }
+
+ openai_client_mock = azure_openai_mock.return_value
+
+ mock_response = MagicMock()
+ mock_response.choices = [MagicMock(message=MagicMock(content="Test Title"))]
+
+ openai_client_mock.chat.completions.create = AsyncMock(
+ return_value=mock_response
+ )
+
+ response = client.post("/api/history/update", json=request_json)
+
+ assert response.status_code == 200
+ assert response.json == {
+ "data": {
+ "conversation_id": "conv1",
+ "date": "2024-12-01",
+ "title": "Test Title",
+ },
+ "success": True,
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_update_conversation_no_chat_history(
+ self, get_active_config_or_default_mock, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = False
+ response = client.post(
+ "/api/history/update", json={}, headers={"Content-Type": "application/json"}
+ )
+ assert response.status_code == 400
+ assert response.json == {"error": "Chat history is not available"}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_update_conversation_connect_error(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.get_conversation.return_value = {
+ "title": "Test Title",
+ "updatedAt": "2024-12-01",
+ "id": "conv1",
+ }
+ request_json = {
+ "conversation_id": "conv1",
+ "messages": [
+ {"role": "user", "content": "Hello"},
+ {"role": "assistant", "content": "Hi!"},
+ ],
+ }
+ mock_conversation_client.connect.side_effect = Exception("Unexpected error")
+
+ # Make the API call
+ response = client.post(
+ "/api/history/update",
+ json=request_json,
+ headers={"Content-Type": "application/json"},
+ )
+
+ # Assert response
+ assert response.status_code == 500
+ assert response.json == {
+ "error": "Error while updating the conversation history"
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_update_conversation_error(
+ self, get_active_config_or_default_mock, mock_conversation_client, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ mock_conversation_client.create_message.side_effect = Exception(
+ "Unexpected error"
+ )
+ mock_conversation_client.get_conversation.return_value = {
+ "title": "Test Title",
+ "updatedAt": "2024-12-01",
+ "id": "conv1",
+ }
+ request_json = {
+ "conversation_id": "conv1",
+ "messages": [
+ {"role": "user", "content": "Hello"},
+ {"role": "assistant", "content": "Hi!"},
+ ],
+ }
+
+ response = client.post(
+ "/api/history/update",
+ json=request_json,
+ headers={"Content-Type": "application/json"},
+ )
+
+ # Assert response
+ assert response.status_code == 500
+ assert response.json == {
+ "error": "Error while updating the conversation history"
+ }
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_frontend_settings_success(
+ self, get_active_config_or_default_mock, client
+ ):
+ get_active_config_or_default_mock.return_value.enable_chat_history = True
+ response = client.get("/api/history/frontend_settings")
+ assert response.status_code == 200
+ assert response.json == {"CHAT_HISTORY_ENABLED": True}
+
+ @patch(
+ "backend.batch.utilities.helpers.config.config_helper.ConfigHelper.get_active_config_or_default"
+ )
+ def test_get_frontend_settings_error(
+ self, get_active_config_or_default_mock, client
+ ):
+ get_active_config_or_default_mock.side_effect = Exception("Test Error")
+ response = client.get("/api/history/frontend_settings")
+ assert response.status_code == 500
+ assert response.json == {"error": "Error while getting frontend settings"}
diff --git a/code/tests/utilities/helpers/test_azure_postgres_helper.py b/code/tests/utilities/helpers/test_azure_postgres_helper.py
new file mode 100644
index 000000000..7fc10fcec
--- /dev/null
+++ b/code/tests/utilities/helpers/test_azure_postgres_helper.py
@@ -0,0 +1,909 @@
+import unittest
+from unittest.mock import MagicMock, patch
+import psycopg2
+from backend.batch.utilities.helpers.azure_postgres_helper import AzurePostgresHelper
+
+
+class TestAzurePostgresHelper(unittest.TestCase):
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ def test_create_search_client_success(self, mock_connect, mock_credential):
+ # Arrange
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ mock_connection = MagicMock()
+ mock_connect.return_value = mock_connection
+
+ helper = AzurePostgresHelper()
+ helper.env_helper.POSTGRESQL_USER = "mock_user"
+ helper.env_helper.POSTGRESQL_HOST = "mock_host"
+ helper.env_helper.POSTGRESQL_DATABASE = "mock_database"
+
+ # Act
+ connection = helper._create_search_client()
+
+ # Assert
+ self.assertEqual(connection, mock_connection)
+ mock_credential.return_value.get_token.assert_called_once_with(
+ "https://ossrdbms-aad.database.windows.net/.default"
+ )
+ mock_connect.assert_called_once_with(
+ "host=mock_host user=mock_user dbname=mock_database password=mock-access-token"
+ )
+
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ def test_get_search_client_reuses_connection(self, mock_connect):
+ # Arrange
+ mock_connection = MagicMock()
+ mock_connection.closed = 0 # Simulate an open connection
+ mock_connect.return_value = mock_connection
+
+ helper = AzurePostgresHelper()
+ helper.conn = mock_connection
+
+ # Act
+ connection = helper.get_search_client()
+
+ # Assert
+ self.assertEqual(connection, mock_connection)
+ mock_connect.assert_not_called() # Ensure no new connection is created
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.RealDictCursor")
+ def test_get_vector_store_success(
+ self, mock_cursor, mock_connect, mock_credential
+ ):
+ # Arrange
+ # Mock the EnvHelper and set required attributes
+ mock_env_helper = MagicMock()
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the database connection and cursor
+ mock_connection = MagicMock()
+ mock_connect.return_value = mock_connection
+ mock_cursor_instance = MagicMock()
+ mock_cursor.return_value = mock_cursor_instance
+
+ # Mock the behavior of the context manager for the cursor
+ mock_cursor_context = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor_context
+ mock_results = [{"id": 1, "title": "Test"}]
+ mock_cursor_context.fetchall.return_value = mock_results
+
+ # Replace EnvHelper in AzurePostgresHelper with the mocked version
+ helper = AzurePostgresHelper()
+ helper.env_helper = mock_env_helper
+
+ # Embedding vector for the test
+ embedding_vector = [1, 2, 3]
+
+ # Act
+ results = helper.get_vector_store(embedding_vector)
+
+ # Assert
+ self.assertEqual(results, mock_results)
+ mock_connect.assert_called_once_with(
+ "host=mock_host user=mock_user dbname=mock_database password=mock-access-token"
+ )
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ def test_get_vector_store_query_error(self, mock_connect, mock_credential):
+ # Arrange
+ # Mock the EnvHelper and set required attributes
+ mock_env_helper = MagicMock()
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ mock_connection = MagicMock()
+ mock_connect.return_value = mock_connection
+
+ def raise_exception(*args, **kwargs):
+ raise Exception("Query execution error")
+
+ mock_cursor_instance = MagicMock()
+ mock_cursor_instance.execute.side_effect = raise_exception
+
+ mock_connection.cursor.return_value.__enter__.return_value = (
+ mock_cursor_instance
+ )
+
+ helper = AzurePostgresHelper()
+ helper.env_helper = mock_env_helper
+ embedding_vector = [1, 2, 3]
+
+ # Act & Assert
+ with self.assertRaises(Exception) as context:
+ helper.get_vector_store(embedding_vector)
+
+ self.assertEqual(str(context.exception), "Query execution error")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ def test_create_search_client_connection_error(self, mock_connect, mock_credential):
+ # Arrange
+ # Mock the EnvHelper and set required attributes
+ mock_env_helper = MagicMock()
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ def raise_exception(*args, **kwargs):
+ raise Exception("Connection error")
+
+ mock_connect.side_effect = raise_exception
+
+ helper = AzurePostgresHelper()
+ helper.env_helper = mock_env_helper
+
+ # Act & Assert
+ with self.assertRaises(Exception) as context:
+ helper._create_search_client()
+
+ self.assertEqual(str(context.exception), "Connection error")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_get_files_success(self, mock_env_helper, mock_connect, mock_credential):
+ # Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Arrange: Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the result of the cursor's fetchall() method
+ mock_cursor.fetchall.return_value = [
+ {"id": 1, "title": "Title 1"},
+ {"id": 2, "title": "Title 2"},
+ ]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.get_files()
+
+ # Assert: Check that the correct results are returned
+ self.assertEqual(
+ result, [{"id": 1, "title": "Title 1"}, {"id": 2, "title": "Title 2"}]
+ )
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_get_files_no_results(self, mock_env_helper, mock_connect, mock_credential):
+ # Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Arrange: Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the result of the cursor's fetchall() method to return an empty list
+ mock_cursor.fetchall.return_value = []
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.get_files()
+
+ # Assert: Check that the result is None
+ self.assertIsNone(result)
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ def test_get_files_db_error(
+ self, mock_logger, mock_env_helper, mock_connect, mock_credential
+ ):
+ # Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Arrange: Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate a database error when executing the query
+ mock_cursor.fetchall.side_effect = psycopg2.Error("Database error")
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(psycopg2.Error):
+ helper.get_files()
+
+ mock_logger.error.assert_called_with(
+ "Database error while fetching titles: Database error"
+ )
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ def test_get_files_unexpected_error(
+ self, mock_logger, mock_env_helper, mock_connect, mock_credential
+ ):
+ # Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Arrange: Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate an unexpected error
+ mock_cursor.fetchall.side_effect = Exception("Unexpected error")
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(Exception):
+ helper.get_files()
+
+ mock_logger.error.assert_called_with(
+ "Unexpected error while fetching titles: Unexpected error"
+ )
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_delete_documents_success(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor.rowcount and execute
+ mock_cursor.rowcount = 3 # Simulate 3 rows deleted
+ mock_cursor.execute.return_value = None
+
+ ids_to_delete = [{"id": 1}, {"id": 2}, {"id": 3}]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.delete_documents(ids_to_delete)
+
+ # Assert: Check that the correct number of rows were deleted
+ self.assertEqual(result, 3)
+ mock_connection.commit.assert_called_once()
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Deleted 3 documents.")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_delete_documents_no_ids(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # No IDs to delete
+ ids_to_delete = []
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.delete_documents(ids_to_delete)
+
+ # Assert: Check that no rows were deleted and a warning was logged
+ self.assertEqual(result, 0)
+ mock_logger.warning.assert_called_with("No IDs provided for deletion.")
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_delete_documents_db_error(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate a database error during execution
+ mock_cursor.execute.side_effect = psycopg2.Error("Database error")
+
+ ids_to_delete = [{"id": 1}, {"id": 2}]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(psycopg2.Error):
+ helper.delete_documents(ids_to_delete)
+
+ mock_logger.error.assert_called_with(
+ "Database error while deleting documents: Database error"
+ )
+ mock_connection.rollback.assert_called_once()
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_delete_documents_unexpected_error(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate an unexpected error
+ mock_cursor.execute.side_effect = Exception("Unexpected error")
+
+ ids_to_delete = [{"id": 1}, {"id": 2}]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(Exception):
+ helper.delete_documents(ids_to_delete)
+
+ mock_logger.error.assert_called_with(
+ "Unexpected error while deleting documents: Unexpected error"
+ )
+ mock_connection.rollback.assert_called_once()
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_perform_search_success(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall
+ mock_cursor.fetchall.return_value = [
+ {
+ "title": "Test Title",
+ "content": "Test Content",
+ "metadata": "Test Metadata",
+ }
+ ]
+
+ title_to_search = "Test Title"
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.perform_search(title_to_search)
+
+ # Assert: Check that the results match the expected data
+ self.assertEqual(len(result), 1) # One result returned
+ self.assertEqual(result[0]["title"], "Test Title")
+ self.assertEqual(result[0]["content"], "Test Content")
+ self.assertEqual(result[0]["metadata"], "Test Metadata")
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 1 search result(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_perform_search_no_results(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall to return no results
+ mock_cursor.fetchall.return_value = []
+
+ title_to_search = "Nonexistent Title"
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.perform_search(title_to_search)
+
+ # Assert: Check that no results were returned
+ self.assertEqual(result, []) # Empty list returned for no results
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 0 search result(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_perform_search_error(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate an error during the execution of the query
+ mock_cursor.execute.side_effect = Exception("Database error")
+
+ title_to_search = "Test Title"
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(Exception):
+ helper.perform_search(title_to_search)
+
+ mock_logger.error.assert_called_with(
+ "Error executing search query: Database error"
+ )
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_get_unique_files_success(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall
+ mock_cursor.fetchall.return_value = [
+ {"title": "Unique Title 1"},
+ {"title": "Unique Title 2"},
+ ]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.get_unique_files()
+
+ # Assert: Check that the results match the expected data
+ self.assertEqual(len(result), 2) # Two unique titles returned
+ self.assertEqual(result[0]["title"], "Unique Title 1")
+ self.assertEqual(result[1]["title"], "Unique Title 2")
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 2 unique title(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_get_unique_files_no_results(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall to return no results
+ mock_cursor.fetchall.return_value = []
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act: Call the method under test
+ result = helper.get_unique_files()
+
+ # Assert: Check that no results were returned
+ self.assertEqual(result, []) # Empty list returned for no results
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 0 unique title(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_get_unique_files_error(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate an error during the execution of the query
+ mock_cursor.execute.side_effect = Exception("Database error")
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(Exception):
+ helper.get_unique_files()
+
+ mock_logger.error.assert_called_with(
+ "Error executing search query: Database error"
+ )
+ mock_connection.close.assert_called_once()
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_search_by_blob_url_success(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall
+ mock_cursor.fetchall.return_value = [
+ {"id": 1, "title": "Title 1"},
+ {"id": 2, "title": "Title 2"},
+ ]
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+ blob_url = "mock_blob_url"
+
+ # Act: Call the method under test
+ result = helper.search_by_blob_url(blob_url)
+
+ # Assert: Check that the results match the expected data
+ self.assertEqual(len(result), 2) # Two titles returned
+ self.assertEqual(result[0]["title"], "Title 1")
+ self.assertEqual(result[1]["title"], "Title 2")
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 2 unique title(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_search_by_blob_url_no_results(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Mock the behavior of cursor's execute and fetchall to return no results
+ mock_cursor.fetchall.return_value = []
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+ blob_url = "mock_blob_url"
+
+ # Act: Call the method under test
+ result = helper.search_by_blob_url(blob_url)
+
+ # Assert: Check that no results were returned
+ self.assertEqual(result, []) # Empty list returned for no results
+
+ # Ensure the connection was closed
+ mock_connection.close.assert_called_once()
+ mock_logger.info.assert_called_with("Retrieved 0 unique title(s).")
+
+ @patch(
+ "backend.batch.utilities.helpers.azure_postgres_helper.DefaultAzureCredential"
+ )
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.psycopg2.connect")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.logger")
+ @patch("backend.batch.utilities.helpers.azure_postgres_helper.EnvHelper")
+ def test_search_by_blob_url_error(
+ self, mock_env_helper, mock_logger, mock_connect, mock_credential
+ ):
+ # Arrange: Mock the EnvHelper attributes
+ mock_env_helper.POSTGRESQL_USER = "mock_user"
+ mock_env_helper.POSTGRESQL_HOST = "mock_host"
+ mock_env_helper.POSTGRESQL_DATABASE = "mock_database"
+ mock_env_helper.AZURE_POSTGRES_SEARCH_TOP_K = 5
+
+ # Mock access token retrieval
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ # Mock the connection and cursor
+ mock_connection = MagicMock()
+ mock_cursor = MagicMock()
+ mock_connection.cursor.return_value.__enter__.return_value = mock_cursor
+ mock_connect.return_value = mock_connection
+
+ # Simulate an error during the execution of the query
+ mock_cursor.execute.side_effect = Exception("Database error")
+
+ # Create an instance of the helper
+ helper = AzurePostgresHelper()
+ blob_url = "mock_blob_url"
+
+ # Act & Assert: Ensure that the exception is raised and the error is logged
+ with self.assertRaises(Exception):
+ helper.search_by_blob_url(blob_url)
+
+ mock_logger.error.assert_called_with(
+ "Error executing search query: Database error"
+ )
+ mock_connection.close.assert_called_once()
diff --git a/code/tests/utilities/helpers/test_database_factory.py b/code/tests/utilities/helpers/test_database_factory.py
new file mode 100644
index 000000000..0a1734171
--- /dev/null
+++ b/code/tests/utilities/helpers/test_database_factory.py
@@ -0,0 +1,89 @@
+import pytest
+from unittest.mock import patch, MagicMock
+from backend.batch.utilities.helpers.config.database_type import DatabaseType
+from backend.batch.utilities.chat_history.cosmosdb import CosmosConversationClient
+from backend.batch.utilities.chat_history.database_factory import DatabaseFactory
+from backend.batch.utilities.chat_history.postgresdbservice import (
+ PostgresConversationClient,
+)
+
+
+@patch("backend.batch.utilities.chat_history.database_factory.DefaultAzureCredential")
+@patch("backend.batch.utilities.chat_history.database_factory.EnvHelper")
+@patch(
+ "backend.batch.utilities.chat_history.database_factory.CosmosConversationClient",
+ autospec=True,
+)
+def test_get_conversation_client_cosmos(
+ mock_cosmos_client, mock_env_helper, mock_credential
+):
+ # Configure the EnvHelper mock
+ mock_env_instance = mock_env_helper.return_value
+ mock_env_instance.DATABASE_TYPE = DatabaseType.COSMOSDB.value
+ mock_env_instance.AZURE_COSMOSDB_ACCOUNT = "cosmos_account"
+ mock_env_instance.AZURE_COSMOSDB_DATABASE = "cosmos_database"
+ mock_env_instance.AZURE_COSMOSDB_CONVERSATIONS_CONTAINER = "conversations_container"
+ mock_env_instance.AZURE_COSMOSDB_ENABLE_FEEDBACK = False
+ mock_env_instance.AZURE_COSMOSDB_ACCOUNT_KEY = None
+
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+ mock_credential_instance = mock_credential.return_value
+
+ # Mock the CosmosConversationClient instance
+ mock_cosmos_instance = MagicMock(spec=CosmosConversationClient)
+ mock_cosmos_client.return_value = mock_cosmos_instance
+
+ # Call the method
+ client = DatabaseFactory.get_conversation_client()
+
+ # Assert the CosmosConversationClient was called with correct arguments
+ mock_cosmos_client.assert_called_once_with(
+ cosmosdb_endpoint="https://cosmos_account.documents.azure.com:443/",
+ credential=mock_credential_instance,
+ database_name="cosmos_database",
+ container_name="conversations_container",
+ enable_message_feedback=False,
+ )
+ assert isinstance(client, CosmosConversationClient)
+ assert client == mock_cosmos_instance
+
+
+@patch("backend.batch.utilities.chat_history.database_factory.DefaultAzureCredential")
+@patch("backend.batch.utilities.chat_history.database_factory.EnvHelper")
+@patch(
+ "backend.batch.utilities.chat_history.database_factory.PostgresConversationClient",
+ autospec=True,
+)
+def test_get_conversation_client_postgres(
+ mock_postgres_client, mock_env_helper, mock_credential
+):
+ mock_env_instance = mock_env_helper.return_value
+ mock_env_instance.DATABASE_TYPE = DatabaseType.POSTGRESQL.value
+ mock_env_instance.POSTGRESQL_USER = "postgres_user"
+ mock_env_instance.POSTGRESQL_HOST = "postgres_host"
+ mock_env_instance.POSTGRESQL_DATABASE = "postgres_database"
+
+ mock_access_token = MagicMock()
+ mock_access_token.token = "mock-access-token"
+ mock_credential.return_value.get_token.return_value = mock_access_token
+
+ mock_postgres_instance = MagicMock(spec=PostgresConversationClient)
+ mock_postgres_client.return_value = mock_postgres_instance
+
+ client = DatabaseFactory.get_conversation_client()
+
+ mock_postgres_client.assert_called_once_with(
+ user="postgres_user", host="postgres_host", database="postgres_database"
+ )
+ assert isinstance(client, PostgresConversationClient)
+
+
+@patch("backend.batch.utilities.chat_history.database_factory.EnvHelper")
+def test_get_conversation_client_invalid_database_type(mock_env_helper):
+ mock_env_instance = mock_env_helper.return_value
+ mock_env_instance.DATABASE_TYPE = "INVALID_DB"
+
+ with pytest.raises(ValueError, match="Unsupported DATABASE_TYPE"):
+ DatabaseFactory.get_conversation_client()
diff --git a/code/tests/utilities/helpers/test_env_helper.py b/code/tests/utilities/helpers/test_env_helper.py
index 10e1de308..8acd1e497 100644
--- a/code/tests/utilities/helpers/test_env_helper.py
+++ b/code/tests/utilities/helpers/test_env_helper.py
@@ -133,6 +133,7 @@ def test_azure_speech_recognizer_languages_default(monkeypatch: MonkeyPatch):
)
def test_use_advanced_image_processing(monkeypatch: MonkeyPatch, value, expected):
# given
+ monkeypatch.setenv("DATABASE_TYPE", "CosmosDB")
if value is not None:
monkeypatch.setenv("USE_ADVANCED_IMAGE_PROCESSING", value)
diff --git a/code/tests/utilities/helpers/test_postgres_embedder.py b/code/tests/utilities/helpers/test_postgres_embedder.py
new file mode 100644
index 000000000..8ed07f472
--- /dev/null
+++ b/code/tests/utilities/helpers/test_postgres_embedder.py
@@ -0,0 +1,211 @@
+from unittest.mock import MagicMock, patch, call
+
+import pytest
+from backend.batch.utilities.helpers.embedders.postgres_embedder import PostgresEmbedder
+from backend.batch.utilities.common.source_document import SourceDocument
+from backend.batch.utilities.helpers.config.embedding_config import EmbeddingConfig
+from backend.batch.utilities.document_loading.strategies import LoadingStrategy
+from backend.batch.utilities.document_loading import LoadingSettings
+from backend.batch.utilities.document_chunking.chunking_strategy import ChunkingSettings
+
+CHUNKING_SETTINGS = ChunkingSettings({"strategy": "layout", "size": 1, "overlap": 0})
+LOADING_SETTINGS = LoadingSettings({"strategy": LoadingStrategy.LAYOUT})
+
+
+@pytest.fixture(autouse=True)
+def llm_helper_mock():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.postgres_embedder.LLMHelper"
+ ) as mock:
+ llm_helper = mock.return_value
+ llm_helper.get_embedding_model.return_value.embed_query.return_value = [
+ 0
+ ] * 1536
+ mock_completion = llm_helper.get_chat_completion.return_value
+ choice = MagicMock()
+ choice.message.content = "This is a caption for an image"
+ mock_completion.choices = [choice]
+ llm_helper.generate_embeddings.return_value = [123]
+ yield llm_helper
+
+
+@pytest.fixture(autouse=True)
+def env_helper_mock():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.push_embedder.EnvHelper"
+ ) as mock:
+ env_helper = mock.return_value
+ yield env_helper
+
+
+@pytest.fixture(autouse=True)
+def azure_postgres_helper_mock():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.postgres_embedder.AzurePostgresHelper"
+ ) as mock:
+ yield mock
+
+
+@pytest.fixture(autouse=True)
+def mock_config_helper():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.postgres_embedder.ConfigHelper"
+ ) as mock:
+ config_helper = mock.get_active_config_or_default.return_value
+ config_helper.document_processors = [
+ EmbeddingConfig(
+ "jpg",
+ CHUNKING_SETTINGS,
+ LOADING_SETTINGS,
+ use_advanced_image_processing=True,
+ ),
+ EmbeddingConfig(
+ "pdf",
+ CHUNKING_SETTINGS,
+ LOADING_SETTINGS,
+ use_advanced_image_processing=False,
+ ),
+ ]
+ config_helper.get_advanced_image_processing_image_types.return_value = {
+ "jpeg",
+ "jpg",
+ "png",
+ }
+ yield config_helper
+
+
+@pytest.fixture(autouse=True)
+def document_loading_mock():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.postgres_embedder.DocumentLoading"
+ ) as mock:
+ expected_documents = [
+ SourceDocument(content="some content", source="some source")
+ ]
+ mock.return_value.load.return_value = expected_documents
+ yield mock
+
+
+@pytest.fixture(autouse=True)
+def document_chunking_mock():
+ with patch(
+ "backend.batch.utilities.helpers.embedders.postgres_embedder.DocumentChunking"
+ ) as mock:
+ expected_chunked_documents = [
+ SourceDocument(
+ content="some content",
+ source="some source",
+ id="some id",
+ title="some-title",
+ offset=1,
+ chunk=1,
+ page_number=1,
+ chunk_id="some chunk id",
+ ),
+ SourceDocument(
+ content="some other content",
+ source="some other source",
+ id="some other id",
+ title="some other-title",
+ offset=2,
+ chunk=2,
+ page_number=2,
+ chunk_id="some other chunk id",
+ ),
+ ]
+ mock.return_value.chunk.return_value = expected_chunked_documents
+ yield mock
+
+
+def test_embed_file(
+ document_chunking_mock,
+ document_loading_mock,
+ llm_helper_mock,
+ azure_postgres_helper_mock,
+):
+ postgres_embedder = PostgresEmbedder(MagicMock(), MagicMock())
+ # Setup test data
+ source_url = "https://example.com/document.pdf"
+ file_name = "document.pdf"
+ file_extension = "pdf"
+ embedding_config = MagicMock()
+ postgres_embedder.embedding_configs[file_extension] = (
+ embedding_config # This needs to be adapted if `self.embedder` isn't set.
+ )
+
+ # Mock methods
+ llm_helper_mock.generate_embeddings.return_value = [0.1, 0.2, 0.3]
+ azure_postgres_helper_mock.create_vector_store.return_value = True
+
+ # Execute
+ postgres_embedder.embed_file(source_url, file_name)
+
+ # Assert method calls
+ document_loading_mock.return_value.load.assert_called_once_with(
+ source_url, embedding_config.loading
+ )
+ document_chunking_mock.return_value.chunk.assert_called_once_with(
+ document_loading_mock.return_value.load.return_value, embedding_config.chunking
+ )
+ llm_helper_mock.generate_embeddings.assert_has_calls(
+ [call("some content"), call("some other content")]
+ )
+
+
+def test_advanced_image_processing_not_implemented():
+ postgres_embedder = PostgresEmbedder(MagicMock(), MagicMock())
+ # Test for unsupported advanced image processing
+ file_extension = "jpg"
+ embedding_config = MagicMock()
+ embedding_config.use_advanced_image_processing = True
+ postgres_embedder.embedding_configs[file_extension] = embedding_config
+
+ # Mock config method
+ postgres_embedder.config.get_advanced_image_processing_image_types = MagicMock(
+ return_value=["jpg", "png"]
+ )
+
+ # Use pytest.raises to check the exception
+ with pytest.raises(NotImplementedError) as context:
+ postgres_embedder.embed_file("https://example.com/image.jpg", "image.jpg")
+
+ # Assert that the exception message matches the expected one
+ assert (
+ str(context.value)
+ == "Advanced image processing is not supported in PostgresEmbedder."
+ )
+
+
+def test_postgres_embed_file_loads_documents(document_loading_mock, env_helper_mock):
+ # given
+ push_embedder = PostgresEmbedder(MagicMock(), env_helper_mock)
+ source_url = "some-url"
+
+ # when
+ push_embedder.embed_file(
+ source_url,
+ "some-file-name.pdf",
+ )
+
+ # then
+ document_loading_mock.return_value.load.assert_called_once_with(
+ source_url, LOADING_SETTINGS
+ )
+
+
+def test_postgres_embed_file_chunks_documents(
+ document_loading_mock, document_chunking_mock, env_helper_mock
+):
+ # given
+ push_embedder = PostgresEmbedder(MagicMock(), env_helper_mock)
+
+ # when
+ push_embedder.embed_file(
+ "some-url",
+ "some-file-name.pdf",
+ )
+
+ # then
+ document_chunking_mock.return_value.chunk.assert_called_once_with(
+ document_loading_mock.return_value.load.return_value, CHUNKING_SETTINGS
+ )
diff --git a/docs/LOCAL_DEPLOYMENT.md b/docs/LOCAL_DEPLOYMENT.md
index b10e2eed8..1575f481e 100644
--- a/docs/LOCAL_DEPLOYMENT.md
+++ b/docs/LOCAL_DEPLOYMENT.md
@@ -190,13 +190,9 @@ Execute the above [shell command](#L81) to run the function locally. You may nee
|AZURE_SEARCH_FILTER||Filter to apply to search queries.|
|AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION ||Whether to use [Integrated Vectorization](https://learn.microsoft.com/en-us/azure/search/vector-search-integrated-vectorization)|
|AZURE_OPENAI_RESOURCE||the name of your Azure OpenAI resource|
-|AZURE_OPENAI_MODEL||The name of your model deployment|
-|AZURE_OPENAI_MODEL_NAME|gpt-35-turbo|The name of the model|
-|AZURE_OPENAI_MODEL_VERSION|0613|The version of the model to use|
+|AZURE_OPENAI_MODEL_INFO|{"model":"gpt-35-turbo","modelName":"gpt-35-turbo","modelVersion":"0613"}|`model`: The name of your model deployment.
`modelName`: The name of the model.
`modelVersion`: The version of the model to use.|
|AZURE_OPENAI_API_KEY||One of the API keys of your Azure OpenAI resource|
-|AZURE_OPENAI_EMBEDDING_MODEL|text-embedding-ada-002|The name of your Azure OpenAI embeddings model deployment|
-|AZURE_OPENAI_EMBEDDING_MODEL_NAME|text-embedding-ada-002|The name of the embeddings model (can be found in Azure AI Studio)|
-|AZURE_OPENAI_EMBEDDING_MODEL_VERSION|2|The version of the embeddings model to use (can be found in Azure AI Studio)|
+|AZURE_OPENAI_EMBEDDING_MODEL_INFO|{"model":"text-embedding-ada-002","modelName":"text-embedding-ada-002","modelVersion":"2"}|`model`: The name of your Azure OpenAI embeddings model deployment.
`modelName`: The name of the embeddings model (can be found in Azure AI Studio).
`modelVersion`: The version of the embeddings model to use (can be found in Azure AI Studio).|
|AZURE_OPENAI_TEMPERATURE|0|What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. A value of 0 is recommended when using your data.|
|AZURE_OPENAI_TOP_P|1.0|An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. We recommend setting this to 1.0 when using your data.|
|AZURE_OPENAI_MAX_TOKENS|1000|The maximum number of tokens allowed for the generated answer.|
@@ -206,11 +202,8 @@ Execute the above [shell command](#L81) to run the function locally. You may nee
|AzureWebJobsStorage||The connection string to the Azure Blob Storage for the Azure Functions Batch processing|
|BACKEND_URL||The URL for the Backend Batch Azure Function. Use http://localhost:7071 for local execution|
|DOCUMENT_PROCESSING_QUEUE_NAME|doc-processing|The name of the Azure Queue to handle the Batch processing|
-|AZURE_BLOB_ACCOUNT_NAME||The name of the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_BLOB_ACCOUNT_KEY||The key of the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_BLOB_CONTAINER_NAME||The name of the Container in the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_FORM_RECOGNIZER_ENDPOINT||The name of the Azure Form Recognizer for extracting the text from the documents|
-|AZURE_FORM_RECOGNIZER_KEY||The key of the Azure Form Recognizer for extracting the text from the documents|
+|AZURE_BLOB_STORAGE_INFO|{"containerName":"documents","accountName":"","accountKey":""}|`containerName`: The name of the Container in the Azure Blob Storage for storing the original documents to be processed.
`accountName`: The name of the Azure Blob Storage for storing the original documents to be processed.
`accountKey`: The key of the Azure Blob Storage for storing the original documents to be processed.|
+|AZURE_FORM_RECOGNIZER_INFO|{"endpoint":"","key":""}|`endpoint`: The endpoint of the Azure Form Recognizer for extracting the text from the documents.
`key`: The key of the Azure Form Recognizer for extracting the text from the documents.|
|APPLICATIONINSIGHTS_CONNECTION_STRING||The Application Insights connection string to store the application logs|
|ORCHESTRATION_STRATEGY | openai_function | Orchestration strategy. Use Azure OpenAI Functions (openai_function), Semantic Kernel (semantic_kernel), LangChain (langchain) or Prompt Flow (prompt_flow) for messages orchestration. If you are using a new model version 0613 select any strategy, if you are using a 0314 model version select "langchain". Note that both `openai_function` and `semantic_kernel` use OpenAI function calling. Prompt Flow option is still in development and does not support RBAC or integrated vectorization as of yet.|
|AZURE_CONTENT_SAFETY_ENDPOINT | | The endpoint of the Azure AI Content Safety service |
diff --git a/docs/TEAMS_LOCAL_DEPLOYMENT.md b/docs/TEAMS_LOCAL_DEPLOYMENT.md
index e712fddd7..f9234830d 100644
--- a/docs/TEAMS_LOCAL_DEPLOYMENT.md
+++ b/docs/TEAMS_LOCAL_DEPLOYMENT.md
@@ -64,10 +64,9 @@ Or use the [Azure Functions VS Code extension](https://marketplace.visualstudio.
|AZURE_SEARCH_FIELDS_METADATA|metadata|Field from your Azure AI Search index that contains metadata for the document. `metadata` if you don't have a specific requirement.|
|AZURE_SEARCH_FILTER||Filter to apply to search queries.|
|AZURE_OPENAI_RESOURCE||the name of your Azure OpenAI resource|
-|AZURE_OPENAI_MODEL||The name of your model deployment|
-|AZURE_OPENAI_MODEL_NAME|gpt-35-turbo|The name of the model|
+|AZURE_OPENAI_MODEL_INFO|{"model":"gpt-35-turbo","modelName":"gpt-35-turbo","modelVersion":"0613"}|`model`: The name of your model deployment.
`modelName`: The name of the model.
`modelVersion`: The version of the model to use.|
|AZURE_OPENAI_API_KEY||One of the API keys of your Azure OpenAI resource|
-|AZURE_OPENAI_EMBEDDING_MODEL|text-embedding-ada-002|The name of you Azure OpenAI embeddings model deployment|
+|AZURE_OPENAI_EMBEDDING_MODEL_INFO|{"model":"text-embedding-ada-002","modelName":"text-embedding-ada-002","modelVersion":"2"}|`model`: The name of your Azure OpenAI embeddings model deployment.
`modelName`: The name of the embeddings model (can be found in Azure AI Studio).
`modelVersion`: The version of the embeddings model to use (can be found in Azure AI Studio).|
|AZURE_OPENAI_TEMPERATURE|0|What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. A value of 0 is recommended when using your data.|
|AZURE_OPENAI_TOP_P|1.0|An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. We recommend setting this to 1.0 when using your data.|
|AZURE_OPENAI_MAX_TOKENS|1000|The maximum number of tokens allowed for the generated answer.|
@@ -77,11 +76,8 @@ Or use the [Azure Functions VS Code extension](https://marketplace.visualstudio.
|AzureWebJobsStorage||The connection string to the Azure Blob Storage for the Azure Functions Batch processing|
|BACKEND_URL||The URL for the Backend Batch Azure Function. Use http://localhost:7071 for local execution|
|DOCUMENT_PROCESSING_QUEUE_NAME|doc-processing|The name of the Azure Queue to handle the Batch processing|
-|AZURE_BLOB_ACCOUNT_NAME||The name of the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_BLOB_ACCOUNT_KEY||The key of the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_BLOB_CONTAINER_NAME||The name of the Container in the Azure Blob Storage for storing the original documents to be processed|
-|AZURE_FORM_RECOGNIZER_ENDPOINT||The name of the Azure Form Recognizer for extracting the text from the documents|
-|AZURE_FORM_RECOGNIZER_KEY||The key of the Azure Form Recognizer for extracting the text from the documents|
+|AZURE_BLOB_STORAGE_INFO|{"containerName":"documents","accountName":"","accountKey":""}|`containerName`: The name of the Container in the Azure Blob Storage for storing the original documents to be processed.
`accountName`: The name of the Azure Blob Storage for storing the original documents to be processed.
`accountKey`: The key of the Azure Blob Storage for storing the original documents to be processed.|
+|AZURE_FORM_RECOGNIZER_INFO|{"endpoint":"","key":""}|`endpoint`: The endpoint of the Azure Form Recognizer for extracting the text from the documents.
`key`: The key of the Azure Form Recognizer for extracting the text from the documents.|
|APPLICATIONINSIGHTS_CONNECTION_STRING||The Application Insights connection string to store the application logs|
|ORCHESTRATION_STRATEGY | openai_function | Orchestration strategy. Use Azure OpenAI Functions (openai_function), Semantic Kernel (semantic_kernel), LangChain (langchain) or Prompt Flow (prompt_flow) for messages orchestration. If you are using a new model version 0613 select any strategy, if you are using a 0314 model version select "langchain". Note that both `openai_function` and `semantic_kernel` use OpenAI function calling. Prompt Flow option is still in development and does not support RBAC or integrated vectorization as of yet.|
|AZURE_CONTENT_SAFETY_ENDPOINT | | The endpoint of the Azure AI Content Safety service |
diff --git a/docs/contract_assistance.md b/docs/contract_assistance.md
index ce980a164..8c6c355e6 100644
--- a/docs/contract_assistance.md
+++ b/docs/contract_assistance.md
@@ -22,9 +22,7 @@ To apply the suggested configurations in your deployment, update the following f
- **Azure Semantic Search**: Set `AZURE_SEARCH_USE_SEMANTIC_SEARCH` to `true`
- **Azure Cognitive Search Top K 15**: Set `AZURE_SEARCH_TOP_K` to `15`.
- **Azure Search Integrated Vectorization**: Set `AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION` to `true`.
-- **Azure OpenAI Model**: Set `AZURE_OPENAI_MODEL` to `gpt-4o`.
-- **Azure OpenAI Model Name**: Set `AZURE_OPENAI_MODEL_NAME` to `gpt-4o`. (could be different based on the name of the Azure OpenAI model deployment)
-- **Azure OpenAI Model Name Version**: Set `AZURE_OPENAI_MODEL_VERSION` to `2024-05-13`.
+- **Azure OpenAI Model Info**: Set `AZURE_OPENAI_MODEL_INFO` to `{"model":"gpt-4o","modelName":"gpt-4o","modelVersion":"2024-05-13"}`. (model could be different based on the name of the Azure OpenAI model deployment)
- **Conversation Flow Options**: Set `CONVERSATION_FLOW` to `byod`
- **Orchestration Strategy**: Set `ORCHESTRATION_STRATEGY` to `Semantic Kernel`.
diff --git a/docs/employee_assistance.md b/docs/employee_assistance.md
index e23616684..1af072d01 100644
--- a/docs/employee_assistance.md
+++ b/docs/employee_assistance.md
@@ -22,9 +22,7 @@ To apply the suggested configurations in your deployment, update the following f
- **Azure Semantic Search**: Set `AZURE_SEARCH_USE_SEMANTIC_SEARCH` to `true`
- **Azure Cognitive Search Top K 15**: Set `AZURE_SEARCH_TOP_K` to `15`.
- **Azure Search Integrated Vectorization**: Set `AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION` to `true`.
-- **Azure OpenAI Model**: Set `AZURE_OPENAI_MODEL` to `gpt-4o`.
-- **Azure OpenAI Model Name**: Set `AZURE_OPENAI_MODEL_NAME` to `gpt-4o`. (could be different based on the name of the Azure OpenAI model deployment)
-- **Azure OpenAI Model Name Version**: Set `AZURE_OPENAI_MODEL_VERSION` to `2024-05-13`.
+- **Azure OpenAI Model Info**: Set `AZURE_OPENAI_MODEL_INFO` to `{"model":"gpt-4o","modelName":"gpt-4o","modelVersion":"2024-05-13"}`. (model could be different based on the name of the Azure OpenAI model deployment).
- **Conversation Flow Options**: Set `CONVERSATION_FLOW` to `byod`
- **Orchestration Strategy**: Set `ORCHESTRATION_STRATEGY` to `Semantic Kernel`.
diff --git a/docs/images/architecture.png b/docs/images/architecture.png
deleted file mode 100644
index 280ad8da5..000000000
Binary files a/docs/images/architecture.png and /dev/null differ
diff --git a/docs/images/architecture_cdb.png b/docs/images/architecture_cdb.png
new file mode 100644
index 000000000..07d99b5e8
Binary files /dev/null and b/docs/images/architecture_cdb.png differ
diff --git a/docs/images/architrecture_pg.png b/docs/images/architrecture_pg.png
new file mode 100644
index 000000000..faef5a8c0
Binary files /dev/null and b/docs/images/architrecture_pg.png differ
diff --git a/docs/images/cwyd-solution-architecture.png b/docs/images/cwyd-solution-architecture.png
deleted file mode 100644
index 0d41503fd..000000000
Binary files a/docs/images/cwyd-solution-architecture.png and /dev/null differ
diff --git a/docs/images/db_selection.png b/docs/images/db_selection.png
new file mode 100644
index 000000000..8e8d4f0e4
Binary files /dev/null and b/docs/images/db_selection.png differ
diff --git a/docs/model_configuration.md b/docs/model_configuration.md
index 0eeeef4d1..f9cc93853 100644
--- a/docs/model_configuration.md
+++ b/docs/model_configuration.md
@@ -11,15 +11,14 @@ This document outlines the necessary steps and configurations required for setti
- For a list of available models, see the [Microsoft Azure AI Services - OpenAI Models documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models).
## Environment Variables (as listed in Azure AI Studio)
-- You can access the Environment Variables section of the `LOCAL_DEPLOYMENT.md` file by clicking on this link: [Environment Variables section in LOCAL_DEPLOYMENT.md](docs/LOCAL_DEPLOYMENT.md#environment-variables).
+- You can access the Environment Variables section of the `LOCAL_DEPLOYMENT.md` file by clicking on this link: [Environment Variables section in LOCAL_DEPLOYMENT.md](LOCAL_DEPLOYMENT.md#environment-variables).
### LLM
-- `AZURE_OPENAI_MODEL`: The Azure OpenAI Model Deployment Name
- - example: `my-gpt-35-turbo-16k`
-- `AZURE_OPENAI_MODEL_NAME`: The Azure OpenAI Model Name
- - example: `gpt-35-turbo-16k`
-- `AZURE_OPENAI_MODEL_VERSION`: The Azure OpenAI Model Version
- - example: `0613`
+- `AZURE_OPENAI_MODEL_INFO`: The Azure OpenAI Model Info
+ - example: `{"model":"gpt-35-turbo-16k","modelName":"gpt-35-turbo-16k","modelVersion":"0613"}`
+ - `model` - The Azure OpenAI Model Deployment Name
+ - `modelName` - The Azure OpenAI Model Name
+ - `modelVersion` - The Azure OpenAI Model Version
- `AZURE_OPENAI_MODEL_CAPACITY`: The Tokens per Minute Rate Limit (thousands)
- example: `30`
@@ -34,12 +33,11 @@ This document outlines the necessary steps and configurations required for setti
- example: `10`
### EMBEDDINGS
-- `AZURE_OPENAI_EMBEDDING_MODEL`: The Azure OpenAI Model Deployment Name
- - example: `my-text-embedding-ada-002`
-- `AZURE_OPENAI_EMBEDDING_MODEL_NAME`: The Azure OpenAI Model Name
- - example: `text-embedding-ada-002`
-- `AZURE_OPENAI_EMBEDDING_MODEL_VERSION`: The Azure OpenAI Model Version
- - example: `2`
+- `AZURE_OPENAI_EMBEDDING_MODEL_INFO`: The Azure OpenAI Embedding Model Info
+ - example: `{"model":"text-embedding-ada-002","modelName":"text-embedding-ada-002","modelVersion":"2"}`
+ - `model` - The name of your Azure OpenAI embeddings model deployment.
+ - `modelName` - The name of the embeddings model (can be found in Azure AI Studio).
+ - `modelVersion` - The version of the embeddings model to use (can be found in Azure AI Studio).
- `AZURE_OPENAI_EMBEDDING_MODEL_CAPACITY`: The Tokens per Minute Rate Limit (thousands)
- example: `30`
- `AZURE_SEARCH_DIMENSIONS`: Azure OpenAI Embeddings dimensions. A full list of dimensions can be found [here](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#embeddings-models).
@@ -65,12 +63,8 @@ This document outlines the necessary steps and configurations required for setti
## GPT-4o & Text-Embeddings-3-Large
- The following environment variables are set for the GPT-4o and Text-Embeddings-3-Large models:
- `AZURE_OPENAI_API_VERSION`: `2024-05-01-preview`
- - `AZURE_OPENAI_MODEL`: `my-gpt-4o`
- - `AZURE_OPENAI_MODEL_NAME`: `gpt-4o`
- - `AZURE_OPENAI_MODEL_VERSION`: `2024-05-13`
- - `AZURE_OPENAI_EMBEDDING_MODEL`: `my-text-embedding-3-large`
- - `AZURE_OPENAI_EMBEDDING_MODEL_NAME`: `text-embedding-3-large`
- - `AZURE_OPENAI_EMBEDDING_MODEL_VERSION`: `1`
+ - `AZURE_OPENAI_MODEL_INFO`: `{"model":"my-gpt-4o","modelName":"gpt-4o","modelVersion":"2024-05-13"}`
+ - `AZURE_OPENAI_EMBEDDING_MODEL_INFO`: `{"model":"my-text-embedding-3-large","modelName":"text-embedding-3-large","modelVersion":"1"}`
- `AZURE_SEARCH_DIMENSIONS`: `3072`
- `AZURE_MAX_TOKENS`: `4096`
diff --git a/docs/postgreSQL.md b/docs/postgreSQL.md
new file mode 100644
index 000000000..5e982f57c
--- /dev/null
+++ b/docs/postgreSQL.md
@@ -0,0 +1,88 @@
+### PostgreSQL Integration in CWYD
+
+The CWYD has been enhanced with PostgreSQL as a core feature, enabling flexible, robust, and scalable database capabilities. This document outlines the features, configurations, and functionality introduced with PostgreSQL support.
+
+---
+
+## Features and Enhancements
+
+### 1. **Default Database Configuration**
+PostgreSQL is now the default database for CWYD deployments. If no database preference is specified (`DATABASE_TYPE` is unset or empty), the platform defaults to PostgreSQL. This ensures a streamlined deployment process while utilizing PostgreSQL’s advanced capabilities.
+
+---
+
+### 2. **Unified Environment Configuration**
+To simplify environment setup, PostgreSQL configurations are now grouped under a unified JSON environment variable:
+
+Example:
+```json
+{
+ "type": "PostgreSQL",
+ "user": "DBUSER",
+ "database": "DBNAME",
+ "host": "DBHOST"
+}
+```
+This structure ensures easier management of environment variables and dynamic database selection during runtime.
+
+---
+
+### 3. **PostgreSQL as the Relational and Vector Store Database**
+The PostgreSQL `vector_store` table is used for managing search-related indexing. It supports vector-based similarity searches.
+
+**Table Schema**:
+```sql
+CREATE TABLE IF NOT EXISTS vector_store(
+ id TEXT,
+ title TEXT,
+ chunk INTEGER,
+ chunk_id TEXT,
+ offset INTEGER,
+ page_number INTEGER,
+ content TEXT,
+ source TEXT,
+ metadata TEXT,
+ content_vector VECTOR(1536)
+);
+```
+
+**Similarity Query Example**:
+```sql
+SELECT content
+FROM vector_store
+ORDER BY content_vector <=> $1
+LIMIT $2;
+```
+
+
+---
+
+### 4. **Automated Table Creation**
+The PostgreSQL deployment process automatically creates the necessary tables for chat history and vector storage, including table indexes. The script `create_postgres_tables.py` is executed as part of the infrastructure deployment, ensuring the database is ready for use immediately after setup.
+
+---
+
+### 5. **Secure PostgreSQL Connections**
+All PostgreSQL connections use secure configurations:
+- SSL is enabled with parameters such as `sslmode=verify-full`.
+- Credentials are securely managed via environment variables and Key Vault integrations.
+
+---
+
+### 6. **Backend Enhancements**
+- PostgreSQL database integration is included in the implementation of the Semantic Kernel orchestrator to ensure unified functionality.
+- Database operations, including indexing and similarity searches, align with the CWYD workflow.
+
+---
+
+## Benefits of PostgreSQL Integration
+1. **Scalability**: PostgreSQL offers robust data storage and table indexing capabilities suitable for large-scale deployments
+2. **Flexibility**: Dynamic database switching allows users to choose between PostgreSQL and CosmosDB based on their requirements.
+3. **Ease of Use**: Automated table creation and environment configuration simplify deployment and management.
+4. **Security**: SSL-enabled connections and secure credential handling ensure data protection.
+
+
+---
+
+## Conclusion
+PostgreSQL integration transforms CWYD into a versatile, scalable platform capable of handling advanced database storage, table indexing, and query scenarios. By leveraging PostgreSQL’s cutting edge features, CWYD ensures a seamless user experience, robust performance, and future-ready architecture.
diff --git a/infra/app/adminweb.bicep b/infra/app/adminweb.bicep
index d2e993282..9347a8c33 100644
--- a/infra/app/adminweb.bicep
+++ b/infra/app/adminweb.bicep
@@ -19,8 +19,8 @@ param computerVisionName string = ''
param appSettings object = {}
param useKeyVault bool
param openAIKeyName string = ''
-param storageAccountKeyName string = ''
-param formRecognizerKeyName string = ''
+param azureBlobStorageInfo string = ''
+param azureFormRecognizerInfo string = ''
param searchKeyName string = ''
param computerVisionKeyName string = ''
param contentSafetyKeyName string = ''
@@ -28,6 +28,31 @@ param speechKeyName string = ''
param authType string
param dockerFullImageName string = ''
param useDocker bool = dockerFullImageName != ''
+param databaseType string = 'CosmosDB' // 'CosmosDB' or 'PostgreSQL'
+
+var azureFormRecognizerInfoUpdated = useKeyVault
+ ? azureFormRecognizerInfo
+ : replace(azureFormRecognizerInfo, '$FORM_RECOGNIZER_KEY', listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ formRecognizerName
+ ),
+ '2023-05-01'
+ ).key1)
+
+var azureBlobStorageInfoUpdated = useKeyVault
+ ? azureBlobStorageInfo
+ : replace(azureBlobStorageInfo, '$STORAGE_ACCOUNT_KEY', listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.Storage/storageAccounts',
+ storageAccountName
+ ),
+ '2021-09-01'
+ ).keys[0].value)
module adminweb '../core/host/appservice.bicep' = {
name: '${name}-app-module'
@@ -44,6 +69,7 @@ module adminweb '../core/host/appservice.bicep' = {
scmDoBuildDuringDeployment: useDocker ? false : true
applicationInsightsName: applicationInsightsName
appServicePlanId: appServicePlanId
+ managedIdentity: databaseType == 'PostgreSQL' || !empty(keyVaultName)
appSettings: union(appSettings, {
AZURE_AUTH_TYPE: authType
USE_KEY_VAULT: useKeyVault ? useKeyVault : ''
@@ -69,28 +95,8 @@ module adminweb '../core/host/appservice.bicep' = {
),
'2021-04-01-preview'
).primaryKey
- AZURE_BLOB_ACCOUNT_KEY: useKeyVault
- ? storageAccountKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.Storage/storageAccounts',
- storageAccountName
- ),
- '2021-09-01'
- ).keys[0].value
- AZURE_FORM_RECOGNIZER_KEY: useKeyVault
- ? formRecognizerKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- formRecognizerName
- ),
- '2023-05-01'
- ).key1
+ AZURE_BLOB_STORAGE_INFO: azureBlobStorageInfoUpdated
+ AZURE_FORM_RECOGNIZER_INFO: azureFormRecognizerInfoUpdated
AZURE_CONTENT_SAFETY_KEY: useKeyVault
? contentSafetyKeyName
: listKeys(
diff --git a/infra/app/function.bicep b/infra/app/function.bicep
index 8a0739e7d..2ec146344 100644
--- a/infra/app/function.bicep
+++ b/infra/app/function.bicep
@@ -19,15 +19,39 @@ param speechServiceName string = ''
param computerVisionName string = ''
param useKeyVault bool
param openAIKeyName string = ''
-param storageAccountKeyName string = ''
-param formRecognizerKeyName string = ''
+param azureBlobStorageInfo string = ''
+param azureFormRecognizerInfo string = ''
param searchKeyName string = ''
param computerVisionKeyName string = ''
param contentSafetyKeyName string = ''
param speechKeyName string = ''
param authType string
param dockerFullImageName string = ''
-param cosmosDBKeyName string = ''
+param databaseType string
+
+var azureFormRecognizerInfoUpdated = useKeyVault
+ ? azureFormRecognizerInfo
+ : replace(azureFormRecognizerInfo, '$FORM_RECOGNIZER_KEY', listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ formRecognizerName
+ ),
+ '2023-05-01'
+ ).key1)
+
+var azureBlobStorageInfoUpdated = useKeyVault
+ ? azureBlobStorageInfo
+ : replace(azureBlobStorageInfo, '$STORAGE_ACCOUNT_KEY', listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.Storage/storageAccounts',
+ storageAccountName
+ ),
+ '2021-09-01'
+ ).keys[0].value)
module function '../core/host/functions.bicep' = {
name: '${name}-app-module'
@@ -43,6 +67,7 @@ module function '../core/host/functions.bicep' = {
runtimeVersion: runtimeVersion
dockerFullImageName: dockerFullImageName
useKeyVault: useKeyVault
+ managedIdentity: databaseType == 'PostgreSQL' || !empty(keyVaultName)
appSettings: union(appSettings, {
WEBSITES_ENABLE_APP_SERVICE_STORAGE: 'false'
AZURE_AUTH_TYPE: authType
@@ -69,28 +94,8 @@ module function '../core/host/functions.bicep' = {
),
'2021-04-01-preview'
).primaryKey
- AZURE_BLOB_ACCOUNT_KEY: useKeyVault
- ? storageAccountKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.Storage/storageAccounts',
- storageAccountName
- ),
- '2021-09-01'
- ).keys[0].value
- AZURE_FORM_RECOGNIZER_KEY: useKeyVault
- ? formRecognizerKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- formRecognizerName
- ),
- '2023-05-01'
- ).key1
+ AZURE_BLOB_STORAGE_INFO: azureBlobStorageInfoUpdated
+ AZURE_FORM_RECOGNIZER_INFO: azureFormRecognizerInfoUpdated
AZURE_CONTENT_SAFETY_KEY: useKeyVault
? contentSafetyKeyName
: listKeys(
diff --git a/infra/app/storekeys.bicep b/infra/app/storekeys.bicep
index 506087efb..b2f9b9f39 100644
--- a/infra/app/storekeys.bicep
+++ b/infra/app/storekeys.bicep
@@ -7,6 +7,10 @@ param formRecognizerName string = ''
param contentSafetyName string = ''
param speechServiceName string = ''
param computerVisionName string = ''
+param postgresServerName string = '' // PostgreSQL server name
+param postgresDatabaseName string = 'postgres' // Default database name
+param postgresInfoName string = 'AZURE-POSTGRESQL-INFO' // Secret name for PostgreSQL info
+param postgresDatabaseAdminUserName string = ''
param storageAccountKeyName string = 'AZURE-STORAGE-ACCOUNT-KEY'
param openAIKeyName string = 'AZURE-OPENAI-API-KEY'
param searchKeyName string = 'AZURE-SEARCH-KEY'
@@ -96,15 +100,32 @@ resource computerVisionKeySecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01'
}
}
-// add cosmos db account key
-resource cosmosDbAccountKey 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = {
+// Add PostgreSQL info in JSON format
+resource postgresInfoSecret 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = if (postgresServerName != '') {
+ parent: keyVault
+ name: postgresInfoName
+ properties: {
+ value: postgresServerName != ''
+ ? string({
+ user: postgresDatabaseAdminUserName
+ dbname: postgresDatabaseName
+ host: postgresServerName
+ })
+ : ''
+ }
+}
+
+// Conditional CosmosDB key secret
+resource cosmosDbAccountKey 'Microsoft.KeyVault/vaults/secrets@2022-07-01' = if (cosmosAccountName != '') {
parent: keyVault
name: cosmosAccountKeyName
properties: {
- value: listKeys(
- resourceId(subscription().subscriptionId, rgName, 'Microsoft.DocumentDB/databaseAccounts', cosmosAccountName),
- '2022-08-15'
- ).primaryMasterKey
+ value: cosmosAccountName != ''
+ ? listKeys(
+ resourceId(subscription().subscriptionId, rgName, 'Microsoft.DocumentDB/databaseAccounts', cosmosAccountName),
+ '2022-08-15'
+ ).primaryMasterKey
+ : ''
}
}
@@ -119,4 +140,5 @@ output OPENAI_KEY_NAME string = openAIKeySecret.name
output STORAGE_ACCOUNT_KEY_NAME string = storageAccountKeySecret.name
output SPEECH_KEY_NAME string = speechKeySecret.name
output COMPUTER_VISION_KEY_NAME string = computerVisionName != '' ? computerVisionKeySecret.name : ''
-output COSMOS_ACCOUNT_KEY_NAME string = cosmosDbAccountKey.name
+output COSMOS_ACCOUNT_KEY_NAME string = cosmosAccountName != '' ? cosmosDbAccountKey.name : ''
+output POSTGRESQL_INFO_NAME string = postgresServerName != '' ? postgresInfoSecret.name : ''
diff --git a/infra/app/web.bicep b/infra/app/web.bicep
index 65588b2e7..1efd7f6f4 100644
--- a/infra/app/web.bicep
+++ b/infra/app/web.bicep
@@ -19,8 +19,8 @@ param computerVisionName string = ''
param appSettings object = {}
param useKeyVault bool
param openAIKeyName string = ''
-param storageAccountKeyName string = ''
-param formRecognizerKeyName string = ''
+param azureBlobStorageInfo string = ''
+param azureFormRecognizerInfo string = ''
param searchKeyName string = ''
param computerVisionKeyName string = ''
param contentSafetyKeyName string = ''
@@ -29,98 +29,47 @@ param authType string
param dockerFullImageName string = ''
param useDocker bool = dockerFullImageName != ''
param healthCheckPath string = ''
+
+// Database parameters
+param databaseType string = 'CosmosDB' // 'CosmosDB' or 'PostgreSQL'
param cosmosDBKeyName string = ''
+param postgresInfoName string = ''
-module web '../core/host/appservice.bicep' = {
- name: '${name}-app-module'
- params: {
- name: name
- location: location
- tags: tags
- allowedOrigins: allowedOrigins
- appCommandLine: useDocker ? '' : appCommandLine
- applicationInsightsName: applicationInsightsName
- appServicePlanId: appServicePlanId
- appSettings: union(appSettings, {
- AZURE_AUTH_TYPE: authType
- USE_KEY_VAULT: useKeyVault ? useKeyVault : ''
- AZURE_OPENAI_API_KEY: useKeyVault
- ? openAIKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- azureOpenAIName
- ),
- '2023-05-01'
- ).key1
- AZURE_SEARCH_KEY: useKeyVault
- ? searchKeyName
- : listAdminKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.Search/searchServices',
- azureAISearchName
- ),
- '2021-04-01-preview'
- ).primaryKey
- AZURE_BLOB_ACCOUNT_KEY: useKeyVault
- ? storageAccountKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.Storage/storageAccounts',
- storageAccountName
- ),
- '2021-09-01'
- ).keys[0].value
- AZURE_FORM_RECOGNIZER_KEY: useKeyVault
- ? formRecognizerKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- formRecognizerName
- ),
- '2023-05-01'
- ).key1
- AZURE_CONTENT_SAFETY_KEY: useKeyVault
- ? contentSafetyKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- contentSafetyName
- ),
- '2023-05-01'
- ).key1
- AZURE_SPEECH_SERVICE_KEY: useKeyVault
- ? speechKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- speechServiceName
- ),
- '2023-05-01'
- ).key1
- AZURE_COMPUTER_VISION_KEY: (useKeyVault || computerVisionName == '')
- ? computerVisionKeyName
- : listKeys(
- resourceId(
- subscription().subscriptionId,
- resourceGroup().name,
- 'Microsoft.CognitiveServices/accounts',
- computerVisionName
- ),
- '2023-05-01'
- ).key1
+var azureFormRecognizerInfoUpdated = useKeyVault
+ ? azureFormRecognizerInfo
+ : replace(
+ azureFormRecognizerInfo,
+ '$FORM_RECOGNIZER_KEY',
+ listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ formRecognizerName
+ ),
+ '2023-05-01'
+ ).key1
+ )
+
+var azureBlobStorageInfoUpdated = useKeyVault
+ ? azureBlobStorageInfo
+ : replace(
+ azureBlobStorageInfo,
+ '$STORAGE_ACCOUNT_KEY',
+ listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.Storage/storageAccounts',
+ storageAccountName
+ ),
+ '2021-09-01'
+ ).keys[0].value
+ )
+
+// Database-specific settings
+var databaseSettings = databaseType == 'CosmosDB'
+ ? {
AZURE_COSMOSDB_ACCOUNT_KEY: (useKeyVault || cosmosDBKeyName == '')
? cosmosDBKeyName
: listKeys(
@@ -132,13 +81,90 @@ module web '../core/host/appservice.bicep' = {
),
'2022-08-15'
).primaryMasterKey
- })
+ }
+ : {}
+
+module web '../core/host/appservice.bicep' = {
+ name: '${name}-app-module'
+ params: {
+ name: name
+ location: location
+ tags: tags
+ allowedOrigins: allowedOrigins
+ appCommandLine: useDocker ? '' : appCommandLine
+ applicationInsightsName: applicationInsightsName
+ appServicePlanId: appServicePlanId
+ appSettings: union(
+ appSettings,
+ union(databaseSettings, {
+ AZURE_AUTH_TYPE: authType
+ USE_KEY_VAULT: useKeyVault ? useKeyVault : ''
+ AZURE_OPENAI_API_KEY: useKeyVault
+ ? openAIKeyName
+ : listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ azureOpenAIName
+ ),
+ '2023-05-01'
+ ).key1
+ AZURE_SEARCH_KEY: useKeyVault
+ ? searchKeyName
+ : listAdminKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.Search/searchServices',
+ azureAISearchName
+ ),
+ '2021-04-01-preview'
+ ).primaryKey
+ AZURE_BLOB_STORAGE_INFO: azureBlobStorageInfoUpdated
+ AZURE_FORM_RECOGNIZER_INFO: azureFormRecognizerInfoUpdated
+ AZURE_CONTENT_SAFETY_KEY: useKeyVault
+ ? contentSafetyKeyName
+ : listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ contentSafetyName
+ ),
+ '2023-05-01'
+ ).key1
+ AZURE_SPEECH_SERVICE_KEY: useKeyVault
+ ? speechKeyName
+ : listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ speechServiceName
+ ),
+ '2023-05-01'
+ ).key1
+ AZURE_COMPUTER_VISION_KEY: (useKeyVault || computerVisionName == '')
+ ? computerVisionKeyName
+ : listKeys(
+ resourceId(
+ subscription().subscriptionId,
+ resourceGroup().name,
+ 'Microsoft.CognitiveServices/accounts',
+ computerVisionName
+ ),
+ '2023-05-01'
+ ).key1
+ })
+ )
keyVaultName: keyVaultName
runtimeName: runtimeName
runtimeVersion: runtimeVersion
dockerFullImageName: dockerFullImageName
scmDoBuildDuringDeployment: useDocker ? false : true
healthCheckPath: healthCheckPath
+ managedIdentity: databaseType == 'PostgreSQL' || !empty(keyVaultName)
}
}
@@ -163,8 +189,6 @@ module openAIRoleWeb '../core/security/role.bicep' = if (authType == 'rbac') {
}
// Contributor
-// This role is used to grant the service principal contributor access to the resource group
-// See if this is needed in the future.
module openAIRoleWebContributor '../core/security/role.bicep' = if (authType == 'rbac') {
name: 'openai-role-web-contributor'
params: {
@@ -196,7 +220,7 @@ resource cosmosRoleDefinition 'Microsoft.DocumentDB/databaseAccounts/sqlRoleDefi
name: '${json(appSettings.AZURE_COSMOSDB_INFO).accountName}/00000000-0000-0000-0000-000000000002'
}
-module cosmosUserRole '../core/database/cosmos-sql-role-assign.bicep' = {
+module cosmosUserRole '../core/database/cosmos-sql-role-assign.bicep' = if (databaseType == 'CosmosDB') {
name: 'cosmos-sql-user-role-${web.name}'
params: {
accountName: json(appSettings.AZURE_COSMOSDB_INFO).accountName
diff --git a/infra/core/database/deploy_create_table_script.bicep b/infra/core/database/deploy_create_table_script.bicep
new file mode 100644
index 000000000..9ca5ed0a1
--- /dev/null
+++ b/infra/core/database/deploy_create_table_script.bicep
@@ -0,0 +1,31 @@
+@description('Specifies the location for resources.')
+param solutionLocation string
+
+param baseUrl string
+param keyVaultName string
+param identity string
+param postgresSqlServerName string
+param webAppPrincipalName string
+param adminAppPrincipalName string
+param managedIdentityName string
+param functionAppPrincipalName string
+
+resource create_index 'Microsoft.Resources/deploymentScripts@2020-10-01' = {
+ kind:'AzureCLI'
+ name: 'create_postgres_table'
+  location: solutionLocation // Deployment script runs in the same location as the solution
+ identity: {
+ type: 'UserAssigned'
+ userAssignedIdentities: {
+ '${identity}' : {}
+ }
+ }
+ properties: {
+ azCliVersion: '2.52.0'
+ primaryScriptUri: '${baseUrl}scripts/run_create_table_script.sh'
+ arguments: '${baseUrl} ${keyVaultName} ${resourceGroup().name} ${postgresSqlServerName} ${webAppPrincipalName} ${adminAppPrincipalName} ${functionAppPrincipalName} ${managedIdentityName}' // Specify any arguments for the script
+ timeout: 'PT1H' // Specify the desired timeout duration
+ retentionInterval: 'PT1H' // Specify the desired retention interval
+ cleanupPreference:'OnSuccess'
+ }
+}
diff --git a/infra/core/database/postgresdb.bicep b/infra/core/database/postgresdb.bicep
new file mode 100644
index 000000000..9b28795fe
--- /dev/null
+++ b/infra/core/database/postgresdb.bicep
@@ -0,0 +1,141 @@
+param solutionName string
+param solutionLocation string
+param managedIdentityObjectId string
+param managedIdentityObjectName string
+@description('The name of the SQL logical server.')
+param serverName string = '${solutionName}-postgres'
+
+param administratorLogin string = 'admintest'
+@secure()
+param administratorLoginPassword string = 'Initial_0524'
+param serverEdition string = 'Burstable'
+param skuSizeGB int = 32
+param dbInstanceType string = 'Standard_B1ms'
+// param haMode string = 'ZoneRedundant'
+param availabilityZone string = '1'
+param allowAllIPsFirewall bool = false
+param allowAzureIPsFirewall bool = false
+@description('PostgreSQL version')
+@allowed([
+ '11'
+ '12'
+ '13'
+ '14'
+ '15'
+ '16'
+])
+param version string = '16'
+
+resource serverName_resource 'Microsoft.DBforPostgreSQL/flexibleServers@2023-12-01-preview' = {
+ name: serverName
+ location: solutionLocation
+ sku: {
+ name: dbInstanceType
+ tier: serverEdition
+ }
+ properties: {
+ version: version
+ administratorLogin: administratorLogin
+ administratorLoginPassword: administratorLoginPassword
+ authConfig: {
+ tenantId: subscription().tenantId
+ activeDirectoryAuth: 'Enabled'
+ passwordAuth: 'Enabled'
+ }
+ highAvailability: {
+ mode: 'Disabled'
+ }
+ storage: {
+ storageSizeGB: skuSizeGB
+ }
+ backup: {
+ backupRetentionDays: 7
+ geoRedundantBackup: 'Disabled'
+ }
+ network: {
+ publicNetworkAccess: 'Enabled'
+ }
+ availabilityZone: availabilityZone
+ }
+}
+
+resource delayScript 'Microsoft.Resources/deploymentScripts@2020-10-01' = {
+ name: 'waitForServerReady'
+ location: resourceGroup().location
+ kind: 'AzurePowerShell'
+ properties: {
+ azPowerShellVersion: '3.0'
+ scriptContent: 'start-sleep -Seconds 300'
+ cleanupPreference: 'Always'
+ retentionInterval: 'PT1H'
+ }
+ dependsOn: [
+ serverName_resource
+ ]
+}
+
+resource configurations 'Microsoft.DBforPostgreSQL/flexibleServers/configurations@2023-12-01-preview' = {
+ name: 'azure.extensions'
+ parent: serverName_resource
+ properties: {
+ value: 'vector'
+ source: 'user-override'
+ }
+ dependsOn: [
+ delayScript
+ ]
+}
+
+resource azureADAdministrator 'Microsoft.DBforPostgreSQL/flexibleServers/administrators@2022-12-01' = {
+ parent: serverName_resource
+ name: managedIdentityObjectId
+ properties: {
+ principalType: 'SERVICEPRINCIPAL'
+ principalName: managedIdentityObjectName
+ tenantId: subscription().tenantId
+ }
+ dependsOn: [
+ configurations
+ ]
+}
+
+// resource serverName_firewallrules 'Microsoft.DBforPostgreSQL/flexibleServers/firewallRules@2021-06-01' = [for rule in firewallrules: {
+// parent: serverName_resource
+// name: rule.Name
+// properties: {
+// startIpAddress: rule.StartIpAddress
+// endIpAddress: rule.EndIpAddress
+// }
+// }]
+
+resource firewall_all 'Microsoft.DBforPostgreSQL/flexibleServers/firewallRules@2023-12-01-preview' = if (allowAllIPsFirewall) {
+ parent: serverName_resource
+ name: 'allow-all-IPs'
+ properties: {
+ startIpAddress: '0.0.0.0'
+ endIpAddress: '255.255.255.255'
+ }
+ dependsOn: [
+ azureADAdministrator
+ ]
+}
+
+resource firewall_azure 'Microsoft.DBforPostgreSQL/flexibleServers/firewallRules@2023-12-01-preview' = if (allowAzureIPsFirewall) {
+ parent: serverName_resource
+ name: 'allow-all-azure-internal-IPs'
+ properties: {
+ startIpAddress: '0.0.0.0'
+ endIpAddress: '0.0.0.0'
+ }
+ dependsOn: [
+ azureADAdministrator
+ ]
+}
+
+output postgresDbOutput object = {
+ postgresSQLName: serverName_resource.name
+ postgreSQLServerName: '${serverName_resource.name}.postgres.database.azure.com'
+ postgreSQLDatabaseName: 'postgres'
+ postgreSQLDbUser: administratorLogin
+ sslMode: 'Require'
+}
diff --git a/infra/core/security/keyvault.bicep b/infra/core/security/keyvault.bicep
index 3920c3b3a..120b3c074 100644
--- a/infra/core/security/keyvault.bicep
+++ b/infra/core/security/keyvault.bicep
@@ -2,6 +2,7 @@ metadata description = 'Creates an Azure Key Vault.'
param name string
param location string = resourceGroup().location
param tags object = {}
+param managedIdentityObjectId string = ''
param principalId string = ''
@@ -12,18 +13,58 @@ resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' = {
properties: {
tenantId: subscription().tenantId
sku: { family: 'A', name: 'standard' }
- accessPolicies: !empty(principalId)
- ? [
- {
- objectId: principalId
- permissions: { secrets: [ 'get', 'list' ] }
- tenantId: subscription().tenantId
- }
- ]
- : []
+ accessPolicies: concat(
+ managedIdentityObjectId != '' ? [
+ {
+ objectId: managedIdentityObjectId
+ permissions: {
+ keys: [
+ 'get'
+ 'list'
+ ]
+ secrets: [
+ 'get'
+ 'list'
+ ]
+ }
+ tenantId: subscription().tenantId
+ }
+ ] : [],
+ principalId != '' ? [
+ {
+ objectId: principalId
+ permissions: {
+ keys: [
+ 'get'
+ 'list'
+ ]
+ secrets: [
+ 'get'
+ 'list'
+ ]
+ }
+ tenantId: subscription().tenantId
+ }
+ ] : []
+ )
}
}
+// @description('This is the built-in Key Vault Administrator role.')
+// resource kvAdminRole 'Microsoft.Authorization/roleDefinitions@2018-01-01-preview' existing = {
+// scope: resourceGroup()
+// name: '00482a5a-887f-4fb3-b363-3b7fe8e74483'
+// }
+
+// resource roleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
+// name: guid(resourceGroup().id, managedIdentityObjectId, kvAdminRole.id)
+// properties: {
+// principalId: managedIdentityObjectId
+// roleDefinitionId:kvAdminRole.id
+// principalType: 'ServicePrincipal'
+// }
+// }
+
output endpoint string = keyVault.properties.vaultUri
output name string = keyVault.name
-output id string = keyVault.id
\ No newline at end of file
+output id string = keyVault.id
diff --git a/infra/core/security/managed-identity.bicep b/infra/core/security/managed-identity.bicep
new file mode 100644
index 000000000..ba7176b80
--- /dev/null
+++ b/infra/core/security/managed-identity.bicep
@@ -0,0 +1,43 @@
+// ========== Managed Identity ========== //
+targetScope = 'resourceGroup'
+
+@minLength(3)
+@maxLength(15)
+@description('Solution Name')
+param solutionName string
+
+@description('Solution Location')
+param solutionLocation string
+
+@description('Name')
+param miName string = '${ solutionName }-managed-identity'
+
+resource managedIdentity 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-31' = {
+ name: miName
+ location: solutionLocation
+ tags: {
+ app: solutionName
+ location: solutionLocation
+ }
+}
+
+@description('This is the built-in owner role. See https://docs.microsoft.com/azure/role-based-access-control/built-in-roles#owner')
+resource ownerRoleDefinition 'Microsoft.Authorization/roleDefinitions@2018-01-01-preview' existing = {
+ scope: resourceGroup()
+ name: '8e3af657-a8ff-443c-a75c-2fe8c4bcb635'
+}
+
+resource roleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
+ name: guid(resourceGroup().id, managedIdentity.id, ownerRoleDefinition.id)
+ properties: {
+ principalId: managedIdentity.properties.principalId
+ roleDefinitionId: ownerRoleDefinition.id
+ principalType: 'ServicePrincipal'
+ }
+}
+
+output managedIdentityOutput object = {
+ id: managedIdentity.id
+ objectId: managedIdentity.properties.principalId
+ name: miName
+}
diff --git a/infra/main.bicep b/infra/main.bicep
index d0e21d59c..99b5cef99 100644
--- a/infra/main.bicep
+++ b/infra/main.bicep
@@ -42,6 +42,20 @@ param hostingPlanSku string = 'B3'
])
param skuTier string = 'Basic'
+@description('The type of database to deploy (PostgreSQL or CosmosDB)')
+@allowed([
+ 'PostgreSQL'
+ 'CosmosDB'
+])
+param databaseType string = 'PostgreSQL'
+
+@description('Azure Cosmos DB Account Name')
+param azureCosmosDBAccountName string = 'cosmos-${resourceToken}'
+
+@description('Azure Postgres DB Account Name')
+param azurePostgresDBAccountName string = 'postgres-${resourceToken}'
+
+
@description('Name of Web App')
param websiteName string = 'web-${resourceToken}'
@@ -102,7 +116,7 @@ param azureSearchOffsetColumn string = 'offset'
@description('Url column')
param azureSearchUrlColumn string = 'url'
-@description('Use Azure Search Integrated Vectorization')
+@description('Whether to use Azure Search Integrated Vectorization. If the database type is PostgreSQL, set this to false.')
param azureSearchUseIntegratedVectorization bool = false
@description('Name of Azure OpenAI Resource')
@@ -123,7 +137,7 @@ param azureOpenAIModelVersion string = '0613'
@description('Azure OpenAI Model Capacity - See here for more info https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota')
param azureOpenAIModelCapacity int = 30
-@description('Enables the use of a vision LLM and Computer Vision for embedding images')
+@description('Whether to enable the use of a vision LLM and Computer Vision for embedding images. If the database type is PostgreSQL, set this to false.')
param useAdvancedImageProcessing bool = false
@description('The maximum number of images to pass to the vision model in a single request')
@@ -141,16 +155,16 @@ param azureOpenAIVisionModelVersion string = 'vision-preview'
@description('Azure OpenAI Vision Model Capacity - See here for more info https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota')
param azureOpenAIVisionModelCapacity int = 10
-@description('Orchestration strategy: openai_function or semantic_kernel or langchain str. If you use a old version of turbo (0301), please select langchain')
+@description('Orchestration strategy: openai_function or semantic_kernel or langchain str. If you use an old version of turbo (0301), please select langchain. If the database type is PostgreSQL, set this to semantic_kernel.')
@allowed([
'openai_function'
'semantic_kernel'
'langchain'
'prompt_flow'
])
-param orchestrationStrategy string = 'openai_function'
+param orchestrationStrategy string = 'semantic_kernel'
-@description('Chat conversation type: custom or byod.')
+@description('Chat conversation type: custom or byod. If the database type is PostgreSQL, set this to custom.')
@allowed([
'custom'
'byod'
@@ -277,7 +291,7 @@ param principalId string = ''
'rbac'
'keys'
])
-param authType string = 'keys'
+param authType string = 'rbac'
@description('Hosting model for the web apps. Containers are prebuilt and can be deployed faster, but code allows for more customization.')
@allowed([
@@ -301,15 +315,6 @@ param recognizedLanguages string = 'en-US,fr-FR,de-DE,it-IT'
@description('Azure Machine Learning Name')
param azureMachineLearningName string = 'aml-${resourceToken}'
-@description('Azure Cosmos DB Account Name')
-param azureCosmosDBAccountName string = 'cosmos-${resourceToken}'
-
-@description('Whether or not to enable chat history')
-@allowed([
- 'true'
- 'false'
-])
-param chatHistoryEnabled string = 'true'
var blobContainerName = 'documents'
var queueName = 'doc-processing'
@@ -318,6 +323,7 @@ var eventGridSystemTopicName = 'doc-processing'
var tags = { 'azd-env-name': environmentName }
var rgName = 'rg-${environmentName}'
var keyVaultName = 'kv-${resourceToken}'
+var baseUrl = 'https://raw.githubusercontent.com/Azure-Samples/chat-with-your-data-solution-accelerator/main/'
var azureOpenAIModelInfo = string({
model: azureOpenAIModel
modelName: azureOpenAIModelName
@@ -329,8 +335,8 @@ var azureOpenAIEmbeddingModelInfo = string({
modelVersion: azureOpenAIEmbeddingModelVersion
})
-var appversion = 'latest' // Update GIT deployment branch
-var registryName = 'fruoccopublic' // Update Registry name
+var appversion = 'latest' // Update GIT deployment branch
+var registryName = 'fruoccopublic' // Update Registry name
// Organize resources in a resource group
resource rg 'Microsoft.Resources/resourceGroups@2021-04-01' = {
@@ -339,7 +345,17 @@ resource rg 'Microsoft.Resources/resourceGroups@2021-04-01' = {
tags: tags
}
-module cosmosDBModule './core/database/cosmosdb.bicep' = {
+// ========== Managed Identity ========== //
+module managedIdentityModule './core/security/managed-identity.bicep' = if (databaseType == 'PostgreSQL') {
+ name: 'deploy_managed_identity'
+ params: {
+ solutionName: resourceToken
+ solutionLocation: location
+ }
+ scope: rg
+}
+
+module cosmosDBModule './core/database/cosmosdb.bicep' = if (databaseType == 'CosmosDB') {
name: 'deploy_cosmos_db'
params: {
name: azureCosmosDBAccountName
@@ -348,6 +364,18 @@ module cosmosDBModule './core/database/cosmosdb.bicep' = {
scope: rg
}
+module postgresDBModule './core/database/postgresdb.bicep' = if (databaseType == 'PostgreSQL') {
+ name: 'deploy_postgres_sql'
+ params: {
+ solutionName: azurePostgresDBAccountName
+ solutionLocation: 'eastus2'
+ managedIdentityObjectId: managedIdentityModule.outputs.managedIdentityOutput.objectId
+ managedIdentityObjectName: managedIdentityModule.outputs.managedIdentityOutput.name
+ allowAzureIPsFirewall: true
+ }
+ scope: rg
+}
+
// Store secrets in a keyvault
module keyvault './core/security/keyvault.bicep' = if (useKeyVault || authType == 'rbac') {
name: 'keyvault'
@@ -357,6 +385,9 @@ module keyvault './core/security/keyvault.bicep' = if (useKeyVault || authType =
location: location
tags: tags
principalId: principalId
+ managedIdentityObjectId: databaseType == 'PostgreSQL'
+ ? managedIdentityModule.outputs.managedIdentityOutput.objectId
+ : ''
}
}
@@ -505,7 +536,14 @@ module storekeys './app/storekeys.bicep' = if (useKeyVault) {
contentSafetyName: contentsafety.outputs.name
speechServiceName: speechServiceName
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
- cosmosAccountName: cosmosDBModule.outputs.cosmosOutput.cosmosAccountName
+ cosmosAccountName: databaseType == 'CosmosDB' ? cosmosDBModule.outputs.cosmosOutput.cosmosAccountName : ''
+ postgresServerName: databaseType == 'PostgreSQL'
+ ? postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ : ''
+ postgresDatabaseName: databaseType == 'PostgreSQL' ? 'postgres' : ''
+ postgresDatabaseAdminUserName: databaseType == 'PostgreSQL'
+ ? postgresDBModule.outputs.postgresDbOutput.postgreSQLDbUser
+ : ''
rgName: rgName
}
}
@@ -547,9 +585,15 @@ module hostingplan './core/host/appserviceplan.bicep' = {
}
var azureCosmosDBInfo = string({
- accountName: cosmosDBModule.outputs.cosmosOutput.cosmosAccountName
- databaseName: cosmosDBModule.outputs.cosmosOutput.cosmosDatabaseName
- containerName: cosmosDBModule.outputs.cosmosOutput.cosmosContainerName
+ accountName: databaseType == 'CosmosDB' ? cosmosDBModule.outputs.cosmosOutput.cosmosAccountName : ''
+ databaseName: databaseType == 'CosmosDB' ? cosmosDBModule.outputs.cosmosOutput.cosmosDatabaseName : ''
+ containerName: databaseType == 'CosmosDB' ? cosmosDBModule.outputs.cosmosOutput.cosmosContainerName : ''
+})
+
+var azurePostgresDBInfo = string({
+ serverName: databaseType == 'PostgreSQL' ? postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName : ''
+ databaseName: databaseType == 'PostgreSQL' ? postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName : ''
+ userName: ''
})
module web './app/web.bicep' = if (hostingModel == 'code') {
@@ -571,67 +615,90 @@ module web './app/web.bicep' = if (hostingModel == 'code') {
contentSafetyName: contentsafety.outputs.name
speechServiceName: speechService.outputs.name
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
+
+ // New database-related parameters
+ databaseType: databaseType // Add this parameter to specify 'PostgreSQL' or 'CosmosDB'
+
+ // Conditional key vault key names
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
speechKeyName: useKeyVault ? storekeys.outputs.SPEECH_KEY_NAME : ''
computerVisionKeyName: useKeyVault ? storekeys.outputs.COMPUTER_VISION_KEY_NAME : ''
- cosmosDBKeyName: useKeyVault ? storekeys.outputs.COSMOS_ACCOUNT_KEY_NAME : ''
+
+ // Conditionally set database key names
+ cosmosDBKeyName: databaseType == 'CosmosDB' && useKeyVault ? storekeys.outputs.COSMOS_ACCOUNT_KEY_NAME : ''
+ postgresInfoName: databaseType == 'PostgreSQL' && useKeyVault ? storekeys.outputs.POSTGRESQL_INFO_NAME : ''
+
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
- AZURE_OPENAI_TOP_P: azureOpenAITopP
- AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
- AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_OPENAI_STREAM: azureOpenAIStream
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_CONVERSATIONS_LOG_INDEX: azureSearchConversationLogIndex
- AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
- AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
- AZURE_SEARCH_TOP_K: azureSearchTopK
- AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
- AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
- AZURE_SEARCH_FILTER: azureSearchFilter
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- AZURE_SPEECH_SERVICE_NAME: speechServiceName
- AZURE_SPEECH_SERVICE_REGION: location
- AZURE_SPEECH_RECOGNIZER_LANGUAGES: recognizedLanguages
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- ADVANCED_IMAGE_PROCESSING_MAX_IMAGES: advancedImageProcessingMaxImages
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- CONVERSATION_FLOW: conversationFlow
- LOGLEVEL: logLevel
- AZURE_COSMOSDB_INFO: azureCosmosDBInfo
- AZURE_COSMOSDB_ENABLE_FEEDBACK: true
- CHAT_HISTORY_ENABLED: chatHistoryEnabled
- }
+
+ appSettings: union(
+ {
+ // Existing app settings
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
+ AZURE_OPENAI_TOP_P: azureOpenAITopP
+ AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
+ AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_OPENAI_STREAM: azureOpenAIStream
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_CONVERSATIONS_LOG_INDEX: azureSearchConversationLogIndex
+ AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
+ AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
+ AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
+ AZURE_SEARCH_FILTER: azureSearchFilter
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ AZURE_SPEECH_SERVICE_NAME: speechServiceName
+ AZURE_SPEECH_SERVICE_REGION: location
+ AZURE_SPEECH_RECOGNIZER_LANGUAGES: recognizedLanguages
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ ADVANCED_IMAGE_PROCESSING_MAX_IMAGES: advancedImageProcessingMaxImages
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ CONVERSATION_FLOW: conversationFlow
+ LOGLEVEL: logLevel
+ DATABASE_TYPE: databaseType
+ },
+ // Conditionally add database-specific settings
+ databaseType == 'CosmosDB'
+ ? {
+ AZURE_COSMOSDB_INFO: azureCosmosDBInfo
+ AZURE_COSMOSDB_ENABLE_FEEDBACK: true
+ }
+ : databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: websiteName
+ })
+ }
+ : {}
+ )
}
}
@@ -653,67 +720,90 @@ module web_docker './app/web.bicep' = if (hostingModel == 'container') {
contentSafetyName: contentsafety.outputs.name
speechServiceName: speechService.outputs.name
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
+
+ // New database-related parameters
+ databaseType: databaseType
+
+ // Conditional key vault key names
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
computerVisionKeyName: useKeyVault ? storekeys.outputs.COMPUTER_VISION_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
speechKeyName: useKeyVault ? storekeys.outputs.SPEECH_KEY_NAME : ''
- cosmosDBKeyName: useKeyVault ? storekeys.outputs.COSMOS_ACCOUNT_KEY_NAME : ''
+
+ // Conditionally set database key names
+ cosmosDBKeyName: databaseType == 'CosmosDB' && useKeyVault ? storekeys.outputs.COSMOS_ACCOUNT_KEY_NAME : ''
+ postgresInfoName: databaseType == 'PostgreSQL' && useKeyVault ? storekeys.outputs.POSTGRESQL_INFO_NAME : ''
+
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
- AZURE_OPENAI_TOP_P: azureOpenAITopP
- AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
- AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_OPENAI_STREAM: azureOpenAIStream
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_CONVERSATIONS_LOG_INDEX: azureSearchConversationLogIndex
- AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
- AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
- AZURE_SEARCH_TOP_K: azureSearchTopK
- AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
- AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
- AZURE_SEARCH_FILTER: azureSearchFilter
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- AZURE_SPEECH_SERVICE_NAME: speechServiceName
- AZURE_SPEECH_SERVICE_REGION: location
- AZURE_SPEECH_RECOGNIZER_LANGUAGES: recognizedLanguages
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- ADVANCED_IMAGE_PROCESSING_MAX_IMAGES: advancedImageProcessingMaxImages
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- CONVERSATION_FLOW: conversationFlow
- LOGLEVEL: logLevel
- AZURE_COSMOSDB_INFO: azureCosmosDBInfo
- AZURE_COSMOSDB_ENABLE_FEEDBACK: true
- CHAT_HISTORY_ENABLED: chatHistoryEnabled
- }
+
+ appSettings: union(
+ {
+ // Existing app settings
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
+ AZURE_OPENAI_TOP_P: azureOpenAITopP
+ AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
+ AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_OPENAI_STREAM: azureOpenAIStream
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_CONVERSATIONS_LOG_INDEX: azureSearchConversationLogIndex
+ AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
+ AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
+ AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
+ AZURE_SEARCH_FILTER: azureSearchFilter
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ AZURE_SPEECH_SERVICE_NAME: speechServiceName
+ AZURE_SPEECH_SERVICE_REGION: location
+ AZURE_SPEECH_RECOGNIZER_LANGUAGES: recognizedLanguages
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ ADVANCED_IMAGE_PROCESSING_MAX_IMAGES: advancedImageProcessingMaxImages
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ CONVERSATION_FLOW: conversationFlow
+ LOGLEVEL: logLevel
+ DATABASE_TYPE: databaseType
+ },
+ // Conditionally add database-specific settings
+ databaseType == 'CosmosDB'
+ ? {
+ AZURE_COSMOSDB_INFO: azureCosmosDBInfo
+ AZURE_COSMOSDB_ENABLE_FEEDBACK: true
+ }
+ : databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: '${websiteName}-docker'
+ })
+ }
+ : {}
+ )
}
}
@@ -736,8 +826,8 @@ module adminweb './app/adminweb.bicep' = if (hostingModel == 'code') {
speechServiceName: speechService.outputs.name
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
computerVisionKeyName: useKeyVault ? storekeys.outputs.COMPUTER_VISION_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
@@ -745,53 +835,62 @@ module adminweb './app/adminweb.bicep' = if (hostingModel == 'code') {
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
- AZURE_OPENAI_TOP_P: azureOpenAITopP
- AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
- AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_OPENAI_STREAM: azureOpenAIStream
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
- AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
- AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
- AZURE_SEARCH_TOP_K: azureSearchTopK
- AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
- AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
- AZURE_SEARCH_FILTER: azureSearchFilter
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
- AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
- AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- BACKEND_URL: 'https://${functionName}.azurewebsites.net'
- DOCUMENT_PROCESSING_QUEUE_NAME: queueName
- FUNCTION_KEY: clientKey
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- LOGLEVEL: logLevel
- CHAT_HISTORY_ENABLED: chatHistoryEnabled
- }
+ databaseType: databaseType
+ appSettings: union(
+ {
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
+ AZURE_OPENAI_TOP_P: azureOpenAITopP
+ AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
+ AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_OPENAI_STREAM: azureOpenAIStream
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
+ AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
+ AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
+ AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
+ AZURE_SEARCH_FILTER: azureSearchFilter
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
+ AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
+ AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ BACKEND_URL: 'https://${functionName}.azurewebsites.net'
+ DOCUMENT_PROCESSING_QUEUE_NAME: queueName
+ FUNCTION_KEY: clientKey
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ LOGLEVEL: logLevel
+ DATABASE_TYPE: databaseType
+ },
+ databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: adminWebsiteName
+ })
+ }
+ : {}
+ )
}
}
@@ -813,8 +912,8 @@ module adminweb_docker './app/adminweb.bicep' = if (hostingModel == 'container')
speechServiceName: speechService.outputs.name
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
speechKeyName: useKeyVault ? storekeys.outputs.SPEECH_KEY_NAME : ''
@@ -822,53 +921,62 @@ module adminweb_docker './app/adminweb.bicep' = if (hostingModel == 'container')
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
- AZURE_OPENAI_TOP_P: azureOpenAITopP
- AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
- AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_OPENAI_STREAM: azureOpenAIStream
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
- AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
- AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
- AZURE_SEARCH_TOP_K: azureSearchTopK
- AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
- AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
- AZURE_SEARCH_FILTER: azureSearchFilter
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
- AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
- AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- BACKEND_URL: 'https://${functionName}-docker.azurewebsites.net'
- DOCUMENT_PROCESSING_QUEUE_NAME: queueName
- FUNCTION_KEY: clientKey
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- LOGLEVEL: logLevel
- CHAT_HISTORY_ENABLED: chatHistoryEnabled
- }
+ databaseType: databaseType
+ appSettings: union(
+ {
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_TEMPERATURE: azureOpenAITemperature
+ AZURE_OPENAI_TOP_P: azureOpenAITopP
+ AZURE_OPENAI_MAX_TOKENS: azureOpenAIMaxTokens
+ AZURE_OPENAI_STOP_SEQUENCE: azureOpenAIStopSequence
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_OPENAI_STREAM: azureOpenAIStream
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_USE_SEMANTIC_SEARCH: azureSearchUseSemanticSearch
+ AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG: azureSearchSemanticSearchConfig
+ AZURE_SEARCH_INDEX_IS_PRECHUNKED: azureSearchIndexIsPrechunked
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ AZURE_SEARCH_ENABLE_IN_DOMAIN: azureSearchEnableInDomain
+ AZURE_SEARCH_FILENAME_COLUMN: azureSearchFilenameColumn
+ AZURE_SEARCH_FILTER: azureSearchFilter
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ AZURE_SEARCH_URL_COLUMN: azureSearchUrlColumn
+ AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
+ AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ BACKEND_URL: 'https://${functionName}-docker.azurewebsites.net'
+ DOCUMENT_PROCESSING_QUEUE_NAME: queueName
+ FUNCTION_KEY: clientKey
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ LOGLEVEL: logLevel
+ DATABASE_TYPE: databaseType
+ },
+ databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: '${adminWebsiteName}-docker'
+ })
+ }
+ : {}
+ )
}
}
@@ -926,8 +1034,8 @@ module function './app/function.bicep' = if (hostingModel == 'code') {
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
clientKey: clientKey
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
speechKeyName: useKeyVault ? storekeys.outputs.SPEECH_KEY_NAME : ''
@@ -935,38 +1043,48 @@ module function './app/function.bicep' = if (hostingModel == 'code') {
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
- AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- DOCUMENT_PROCESSING_QUEUE_NAME: queueName
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- LOGLEVEL: logLevel
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_SEARCH_TOP_K: azureSearchTopK
- }
+ databaseType: databaseType
+ appSettings: union(
+ {
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
+ AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ DOCUMENT_PROCESSING_QUEUE_NAME: queueName
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ LOGLEVEL: logLevel
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ DATABASE_TYPE: databaseType
+ },
+ databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: functionName
+ })
+ }
+ : {}
+ )
}
}
@@ -989,8 +1107,8 @@ module function_docker './app/function.bicep' = if (hostingModel == 'container')
computerVisionName: useAdvancedImageProcessing ? computerVision.outputs.name : ''
clientKey: clientKey
openAIKeyName: useKeyVault ? storekeys.outputs.OPENAI_KEY_NAME : ''
- storageAccountKeyName: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
- formRecognizerKeyName: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+ azureBlobStorageInfo: azureBlobStorageInfo
+ azureFormRecognizerInfo: azureFormRecognizerInfo
searchKeyName: useKeyVault ? storekeys.outputs.SEARCH_KEY_NAME : ''
contentSafetyKeyName: useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
speechKeyName: useKeyVault ? storekeys.outputs.SPEECH_KEY_NAME : ''
@@ -998,38 +1116,48 @@ module function_docker './app/function.bicep' = if (hostingModel == 'container')
useKeyVault: useKeyVault
keyVaultName: useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
authType: authType
- appSettings: {
- AZURE_BLOB_ACCOUNT_NAME: storageAccountName
- AZURE_BLOB_CONTAINER_NAME: blobContainerName
- AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
- AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
- AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
- AZURE_FORM_RECOGNIZER_ENDPOINT: formrecognizer.outputs.endpoint
- AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
- AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
- AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
- AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
- AZURE_SEARCH_INDEX: azureSearchIndex
- AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
- AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
- AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
- AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
- AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
- AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
- AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
- AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
- AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
- AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
- AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
- AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
- USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
- DOCUMENT_PROCESSING_QUEUE_NAME: queueName
- ORCHESTRATION_STRATEGY: orchestrationStrategy
- LOGLEVEL: logLevel
- AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
- AZURE_SEARCH_TOP_K: azureSearchTopK
- }
+ databaseType: databaseType
+ appSettings: union(
+ {
+ AZURE_COMPUTER_VISION_ENDPOINT: useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION: computerVisionVectorizeImageApiVersion
+ AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION: computerVisionVectorizeImageModelVersion
+ AZURE_CONTENT_SAFETY_ENDPOINT: contentsafety.outputs.endpoint
+ AZURE_OPENAI_MODEL_INFO: azureOpenAIModelInfo
+ AZURE_OPENAI_EMBEDDING_MODEL_INFO: azureOpenAIEmbeddingModelInfo
+ AZURE_OPENAI_RESOURCE: azureOpenAIResourceName
+ AZURE_OPENAI_API_VERSION: azureOpenAIApiVersion
+ AZURE_SEARCH_INDEX: azureSearchIndex
+ AZURE_SEARCH_SERVICE: 'https://${azureAISearchName}.search.windows.net'
+ AZURE_SEARCH_DATASOURCE_NAME: azureSearchDatasource
+ AZURE_SEARCH_INDEXER_NAME: azureSearchIndexer
+ AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION: azureSearchUseIntegratedVectorization
+ AZURE_SEARCH_FIELDS_ID: azureSearchFieldId
+ AZURE_SEARCH_CONTENT_COLUMN: azureSearchContentColumn
+ AZURE_SEARCH_CONTENT_VECTOR_COLUMN: azureSearchVectorColumn
+ AZURE_SEARCH_TITLE_COLUMN: azureSearchTitleColumn
+ AZURE_SEARCH_FIELDS_METADATA: azureSearchFieldsMetadata
+ AZURE_SEARCH_SOURCE_COLUMN: azureSearchSourceColumn
+ AZURE_SEARCH_CHUNK_COLUMN: azureSearchChunkColumn
+ AZURE_SEARCH_OFFSET_COLUMN: azureSearchOffsetColumn
+ USE_ADVANCED_IMAGE_PROCESSING: useAdvancedImageProcessing
+ DOCUMENT_PROCESSING_QUEUE_NAME: queueName
+ ORCHESTRATION_STRATEGY: orchestrationStrategy
+ LOGLEVEL: logLevel
+ AZURE_OPENAI_SYSTEM_MESSAGE: azureOpenAISystemMessage
+ AZURE_SEARCH_TOP_K: azureSearchTopK
+ DATABASE_TYPE: databaseType
+ },
+ databaseType == 'PostgreSQL'
+ ? {
+ AZURE_POSTGRESQL_INFO: string({
+ host: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ dbname: postgresDBModule.outputs.postgresDbOutput.postgreSQLDatabaseName
+ user: '${functionName}-docker'
+ })
+ }
+ : {}
+ )
}
}
@@ -1044,6 +1172,11 @@ module formrecognizer 'core/ai/cognitiveservices.bicep' = {
}
}
+var azureFormRecognizerInfo = string({
+ endpoint: formrecognizer.outputs.endpoint
+ key: useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : '$FORM_RECOGNIZER_KEY'
+})
+
module contentsafety 'core/ai/cognitiveservices.bicep' = {
name: contentSafetyName
scope: rg
@@ -1116,6 +1249,12 @@ module storageRoleUser 'core/security/role.bicep' = if (authType == 'rbac' && pr
}
}
+var azureBlobStorageInfo = string({
+ containerName: blobContainerName
+ accountName: storageAccountName
+ accountKey: useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : '$STORAGE_ACCOUNT_KEY'
+})
+
// Cognitive Services User
module openaiRoleUser 'core/security/role.bicep' = if (authType == 'rbac' && principalId != '') {
scope: rg
@@ -1165,11 +1304,34 @@ module machineLearning 'app/machinelearning.bicep' = if (orchestrationStrategy =
}
}
+module createIndex './core/database/deploy_create_table_script.bicep' = if (databaseType == 'PostgreSQL') {
+ name: 'deploy_create_table_script'
+ params: {
+ solutionLocation: location
+ identity: managedIdentityModule.outputs.managedIdentityOutput.id
+ baseUrl: baseUrl
+ keyVaultName: keyvault.outputs.name
+ postgresSqlServerName: postgresDBModule.outputs.postgresDbOutput.postgreSQLServerName
+ webAppPrincipalName: hostingModel == 'code' ? web.outputs.FRONTEND_API_NAME : web_docker.outputs.FRONTEND_API_NAME
+ adminAppPrincipalName: hostingModel == 'code'
+ ? adminweb.outputs.WEBSITE_ADMIN_NAME
+ : adminweb_docker.outputs.WEBSITE_ADMIN_NAME
+ functionAppPrincipalName: hostingModel == 'code'
+ ? function.outputs.functionName
+ : function_docker.outputs.functionName
+ managedIdentityName: managedIdentityModule.outputs.managedIdentityOutput.name
+ }
+ scope: rg
+ dependsOn: hostingModel == 'code'
+ ? [keyvault, postgresDBModule, storekeys, web, adminweb]
+    : [
+        keyvault, postgresDBModule, storekeys, web_docker, adminweb_docker
+      ]
+}
+
output APPLICATIONINSIGHTS_CONNECTION_STRING string = monitoring.outputs.applicationInsightsConnectionString
output AZURE_APP_SERVICE_HOSTING_MODEL string = hostingModel
-output AZURE_BLOB_CONTAINER_NAME string = blobContainerName
-output AZURE_BLOB_ACCOUNT_NAME string = storageAccountName
-output AZURE_BLOB_ACCOUNT_KEY string = useKeyVault ? storekeys.outputs.STORAGE_ACCOUNT_KEY_NAME : ''
+output AZURE_BLOB_STORAGE_INFO string = replace(azureBlobStorageInfo, '$STORAGE_ACCOUNT_KEY', '')
output AZURE_COMPUTER_VISION_ENDPOINT string = useAdvancedImageProcessing ? computerVision.outputs.endpoint : ''
output AZURE_COMPUTER_VISION_LOCATION string = useAdvancedImageProcessing ? computerVision.outputs.location : ''
output AZURE_COMPUTER_VISION_KEY string = useKeyVault ? storekeys.outputs.COMPUTER_VISION_KEY_NAME : ''
@@ -1177,8 +1339,7 @@ output AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION string = computerVision
output AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION string = computerVisionVectorizeImageModelVersion
output AZURE_CONTENT_SAFETY_ENDPOINT string = contentsafety.outputs.endpoint
output AZURE_CONTENT_SAFETY_KEY string = useKeyVault ? storekeys.outputs.CONTENT_SAFETY_KEY_NAME : ''
-output AZURE_FORM_RECOGNIZER_ENDPOINT string = formrecognizer.outputs.endpoint
-output AZURE_FORM_RECOGNIZER_KEY string = useKeyVault ? storekeys.outputs.FORM_RECOGNIZER_KEY_NAME : ''
+output AZURE_FORM_RECOGNIZER_INFO string = replace(azureFormRecognizerInfo, '$FORM_RECOGNIZER_KEY', '')
output AZURE_KEY_VAULT_ENDPOINT string = useKeyVault ? keyvault.outputs.endpoint : ''
output AZURE_KEY_VAULT_NAME string = useKeyVault || authType == 'rbac' ? keyvault.outputs.name : ''
output AZURE_LOCATION string = location
@@ -1234,3 +1395,4 @@ output AZURE_ML_WORKSPACE_NAME string = orchestrationStrategy == 'prompt_flow'
: ''
output RESOURCE_TOKEN string = resourceToken
output AZURE_COSMOSDB_INFO string = azureCosmosDBInfo
+output AZURE_POSTGRESQL_INFO string = azurePostgresDBInfo
diff --git a/infra/main.bicepparam b/infra/main.bicepparam
index 860a4520e..e8c0615e7 100644
--- a/infra/main.bicepparam
+++ b/infra/main.bicepparam
@@ -19,7 +19,6 @@ param orchestrationStrategy = readEnvironmentVariable('ORCHESTRATION_STRATEGY',
param logLevel = readEnvironmentVariable('LOGLEVEL', 'INFO')
param recognizedLanguages = readEnvironmentVariable('AZURE_SPEECH_RECOGNIZER_LANGUAGES', 'en-US,fr-FR,de-DE,it-IT')
param conversationFlow = readEnvironmentVariable('CONVERSATION_FLOW', 'custom')
-param chatHistoryEnabled = readEnvironmentVariable('CHAT_HISTORY_ENABLED', 'true')
//Azure Search
param azureSearchFieldId = readEnvironmentVariable('AZURE_SEARCH_FIELDS_ID', 'id')
@@ -83,4 +82,6 @@ param azureAISearchName = searchServiceName == '' ? 'search-${resourceToken}' :
param azureSearchIndex = readEnvironmentVariable('AZURE_SEARCH_INDEX', 'index-${resourceToken}')
param azureOpenAIResourceName = readEnvironmentVariable('AZURE_OPENAI_RESOURCE', 'openai-${resourceToken}')
-param storageAccountName = readEnvironmentVariable('AZURE_BLOB_ACCOUNT_NAME', 'str${resourceToken}')
+var azureBlobStorageInfo = readEnvironmentVariable('AZURE_BLOB_STORAGE_INFO', '{"containerName": "documents", "accountName": "str${resourceToken}", "accountKey": ""}')
+var azureBlobStorageInfoParsed = json(replace(azureBlobStorageInfo, '\\', '')) // Remove escape characters
+param storageAccountName = azureBlobStorageInfoParsed.accountName
diff --git a/infra/main.json b/infra/main.json
index 2ad41a12e..c0181ce9c 100644
--- a/infra/main.json
+++ b/infra/main.json
@@ -4,8 +4,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9243131736696562084"
+ "version": "0.32.4.45862",
+ "templateHash": "14222791824136321938"
}
},
"parameters": {
@@ -71,6 +71,31 @@
"description": "The sku tier for the App Service plan"
}
},
+ "databaseType": {
+ "type": "string",
+ "defaultValue": "PostgreSQL",
+ "allowedValues": [
+ "PostgreSQL",
+ "CosmosDB"
+ ],
+ "metadata": {
+        "description": "The type of database to deploy (CosmosDB or PostgreSQL)"
+ }
+ },
+ "azureCosmosDBAccountName": {
+ "type": "string",
+ "defaultValue": "[format('cosmos-{0}', parameters('resourceToken'))]",
+ "metadata": {
+ "description": "Azure Cosmos DB Account Name"
+ }
+ },
+ "azurePostgresDBAccountName": {
+ "type": "string",
+ "defaultValue": "[format('postgres-{0}', parameters('resourceToken'))]",
+ "metadata": {
+ "description": "Azure Postgres DB Account Name"
+ }
+ },
"websiteName": {
"type": "string",
"defaultValue": "[format('web-{0}', parameters('resourceToken'))]",
@@ -215,7 +240,7 @@
"type": "bool",
"defaultValue": false,
"metadata": {
- "description": "Use Azure Search Integrated Vectorization"
+ "description": "Whether to use Azure Search Integrated Vectorization. If the database type is PostgreSQL, set this to false."
}
},
"azureOpenAIResourceName": {
@@ -264,7 +289,7 @@
"type": "bool",
"defaultValue": false,
"metadata": {
- "description": "Enables the use of a vision LLM and Computer Vision for embedding images"
+ "description": "Whether to enable the use of a vision LLM and Computer Vision for embedding images. If the database type is PostgreSQL, set this to false."
}
},
"advancedImageProcessingMaxImages": {
@@ -304,7 +329,7 @@
},
"orchestrationStrategy": {
"type": "string",
- "defaultValue": "openai_function",
+ "defaultValue": "semantic_kernel",
"allowedValues": [
"openai_function",
"semantic_kernel",
@@ -312,7 +337,7 @@
"prompt_flow"
],
"metadata": {
- "description": "Orchestration strategy: openai_function or semantic_kernel or langchain str. If you use a old version of turbo (0301), please select langchain"
+        "description": "Orchestration strategy: openai_function or semantic_kernel or langchain str. If you use an old version of turbo (0301), please select langchain. If the database type is PostgreSQL, set this to semantic_kernel."
}
},
"conversationFlow": {
@@ -323,7 +348,7 @@
"byod"
],
"metadata": {
- "description": "Chat conversation type: custom or byod."
+ "description": "Chat conversation type: custom or byod. If the database type is PostgreSQL, set this to custom."
}
},
"azureOpenAITemperature": {
@@ -567,7 +592,7 @@
},
"authType": {
"type": "string",
- "defaultValue": "keys",
+ "defaultValue": "rbac",
"allowedValues": [
"rbac",
"keys"
@@ -611,24 +636,6 @@
"metadata": {
"description": "Azure Machine Learning Name"
}
- },
- "azureCosmosDBAccountName": {
- "type": "string",
- "defaultValue": "[format('cosmos-{0}', parameters('resourceToken'))]",
- "metadata": {
- "description": "Azure Cosmos DB Account Name"
- }
- },
- "chatHistoryEnabled": {
- "type": "string",
- "defaultValue": "true",
- "allowedValues": [
- "true",
- "false"
- ],
- "metadata": {
- "description": "Whether or not to enable chat history"
- }
}
},
"variables": {
@@ -641,6 +648,7 @@
},
"rgName": "[format('rg-{0}', parameters('environmentName'))]",
"keyVaultName": "[format('kv-{0}', parameters('resourceToken'))]",
+ "baseUrl": "https://raw.githubusercontent.com/Azure-Samples/chat-with-your-data-solution-accelerator/main/",
"azureOpenAIModelInfo": "[string(createObject('model', parameters('azureOpenAIModel'), 'modelName', parameters('azureOpenAIModelName'), 'modelVersion', parameters('azureOpenAIModelVersion')))]",
"azureOpenAIEmbeddingModelInfo": "[string(createObject('model', parameters('azureOpenAIEmbeddingModel'), 'modelName', parameters('azureOpenAIEmbeddingModelName'), 'modelVersion', parameters('azureOpenAIEmbeddingModelVersion')))]",
"appversion": "latest",
@@ -682,6 +690,100 @@
"tags": "[variables('tags')]"
},
{
+ "condition": "[equals(parameters('databaseType'), 'PostgreSQL')]",
+ "type": "Microsoft.Resources/deployments",
+ "apiVersion": "2022-09-01",
+ "name": "deploy_managed_identity",
+ "resourceGroup": "[variables('rgName')]",
+ "properties": {
+ "expressionEvaluationOptions": {
+ "scope": "inner"
+ },
+ "mode": "Incremental",
+ "parameters": {
+ "solutionName": {
+ "value": "[parameters('resourceToken')]"
+ },
+ "solutionLocation": {
+ "value": "[parameters('location')]"
+ }
+ },
+ "template": {
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "metadata": {
+ "_generator": {
+ "name": "bicep",
+ "version": "0.32.4.45862",
+ "templateHash": "9540019694218374629"
+ }
+ },
+ "parameters": {
+ "solutionName": {
+ "type": "string",
+ "minLength": 3,
+ "maxLength": 15,
+ "metadata": {
+ "description": "Solution Name"
+ }
+ },
+ "solutionLocation": {
+ "type": "string",
+ "metadata": {
+ "description": "Solution Location"
+ }
+ },
+ "miName": {
+ "type": "string",
+ "defaultValue": "[format('{0}-managed-identity', parameters('solutionName'))]",
+ "metadata": {
+ "description": "Name"
+ }
+ }
+ },
+ "resources": [
+ {
+ "type": "Microsoft.ManagedIdentity/userAssignedIdentities",
+ "apiVersion": "2023-01-31",
+ "name": "[parameters('miName')]",
+ "location": "[parameters('solutionLocation')]",
+ "tags": {
+ "app": "[parameters('solutionName')]",
+ "location": "[parameters('solutionLocation')]"
+ }
+ },
+ {
+ "type": "Microsoft.Authorization/roleAssignments",
+ "apiVersion": "2022-04-01",
+ "name": "[guid(resourceGroup().id, resourceId('Microsoft.ManagedIdentity/userAssignedIdentities', parameters('miName')), resourceId('Microsoft.Authorization/roleDefinitions', '8e3af657-a8ff-443c-a75c-2fe8c4bcb635'))]",
+ "properties": {
+ "principalId": "[reference(resourceId('Microsoft.ManagedIdentity/userAssignedIdentities', parameters('miName')), '2023-01-31').principalId]",
+ "roleDefinitionId": "[resourceId('Microsoft.Authorization/roleDefinitions', '8e3af657-a8ff-443c-a75c-2fe8c4bcb635')]",
+ "principalType": "ServicePrincipal"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.ManagedIdentity/userAssignedIdentities', parameters('miName'))]"
+ ]
+ }
+ ],
+ "outputs": {
+ "managedIdentityOutput": {
+ "type": "object",
+ "value": {
+ "id": "[resourceId('Microsoft.ManagedIdentity/userAssignedIdentities', parameters('miName'))]",
+ "objectId": "[reference(resourceId('Microsoft.ManagedIdentity/userAssignedIdentities', parameters('miName')), '2023-01-31').principalId]",
+ "name": "[parameters('miName')]"
+ }
+ }
+ }
+ }
+ },
+ "dependsOn": [
+ "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]"
+ ]
+ },
+ {
+ "condition": "[equals(parameters('databaseType'), 'CosmosDB')]",
"type": "Microsoft.Resources/deployments",
"apiVersion": "2022-09-01",
"name": "deploy_cosmos_db",
@@ -705,8 +807,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "16376502235448567731"
+ "version": "0.32.4.45862",
+ "templateHash": "11302375145443237554"
}
},
"parameters": {
@@ -843,6 +945,238 @@
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]"
]
},
+ {
+ "condition": "[equals(parameters('databaseType'), 'PostgreSQL')]",
+ "type": "Microsoft.Resources/deployments",
+ "apiVersion": "2022-09-01",
+ "name": "deploy_postgres_sql",
+ "resourceGroup": "[variables('rgName')]",
+ "properties": {
+ "expressionEvaluationOptions": {
+ "scope": "inner"
+ },
+ "mode": "Incremental",
+ "parameters": {
+ "solutionName": {
+ "value": "[parameters('azurePostgresDBAccountName')]"
+ },
+ "solutionLocation": {
+ "value": "eastus2"
+ },
+ "managedIdentityObjectId": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity'), '2022-09-01').outputs.managedIdentityOutput.value.objectId]"
+ },
+ "managedIdentityObjectName": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity'), '2022-09-01').outputs.managedIdentityOutput.value.name]"
+ },
+ "allowAzureIPsFirewall": {
+ "value": true
+ }
+ },
+ "template": {
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "metadata": {
+ "_generator": {
+ "name": "bicep",
+ "version": "0.32.4.45862",
+ "templateHash": "1604911639919456619"
+ }
+ },
+ "parameters": {
+ "solutionName": {
+ "type": "string"
+ },
+ "solutionLocation": {
+ "type": "string"
+ },
+ "managedIdentityObjectId": {
+ "type": "string"
+ },
+ "managedIdentityObjectName": {
+ "type": "string"
+ },
+ "serverName": {
+ "type": "string",
+ "defaultValue": "[format('{0}-postgres', parameters('solutionName'))]",
+ "metadata": {
+ "description": "The name of the SQL logical server."
+ }
+ },
+ "administratorLogin": {
+ "type": "string",
+ "defaultValue": "admintest"
+ },
+ "administratorLoginPassword": {
+ "type": "securestring",
+ "defaultValue": "Initial_0524"
+ },
+ "serverEdition": {
+ "type": "string",
+ "defaultValue": "Burstable"
+ },
+ "skuSizeGB": {
+ "type": "int",
+ "defaultValue": 32
+ },
+ "dbInstanceType": {
+ "type": "string",
+ "defaultValue": "Standard_B1ms"
+ },
+ "availabilityZone": {
+ "type": "string",
+ "defaultValue": "1"
+ },
+ "allowAllIPsFirewall": {
+ "type": "bool",
+ "defaultValue": false
+ },
+ "allowAzureIPsFirewall": {
+ "type": "bool",
+ "defaultValue": false
+ },
+ "version": {
+ "type": "string",
+ "defaultValue": "16",
+ "allowedValues": [
+ "11",
+ "12",
+ "13",
+ "14",
+ "15",
+ "16"
+ ],
+ "metadata": {
+ "description": "PostgreSQL version"
+ }
+ }
+ },
+ "resources": [
+ {
+ "type": "Microsoft.DBforPostgreSQL/flexibleServers",
+ "apiVersion": "2023-12-01-preview",
+ "name": "[parameters('serverName')]",
+ "location": "[parameters('solutionLocation')]",
+ "sku": {
+ "name": "[parameters('dbInstanceType')]",
+ "tier": "[parameters('serverEdition')]"
+ },
+ "properties": {
+ "version": "[parameters('version')]",
+ "administratorLogin": "[parameters('administratorLogin')]",
+ "administratorLoginPassword": "[parameters('administratorLoginPassword')]",
+ "authConfig": {
+ "tenantId": "[subscription().tenantId]",
+ "activeDirectoryAuth": "Enabled",
+ "passwordAuth": "Enabled"
+ },
+ "highAvailability": {
+ "mode": "Disabled"
+ },
+ "storage": {
+ "storageSizeGB": "[parameters('skuSizeGB')]"
+ },
+ "backup": {
+ "backupRetentionDays": 7,
+ "geoRedundantBackup": "Disabled"
+ },
+ "network": {
+ "publicNetworkAccess": "Enabled"
+ },
+ "availabilityZone": "[parameters('availabilityZone')]"
+ }
+ },
+ {
+ "type": "Microsoft.Resources/deploymentScripts",
+ "apiVersion": "2020-10-01",
+ "name": "waitForServerReady",
+ "location": "[resourceGroup().location]",
+ "kind": "AzurePowerShell",
+ "properties": {
+ "azPowerShellVersion": "3.0",
+ "scriptContent": "start-sleep -Seconds 300",
+ "cleanupPreference": "Always",
+ "retentionInterval": "PT1H"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers', parameters('serverName'))]"
+ ]
+ },
+ {
+ "type": "Microsoft.DBforPostgreSQL/flexibleServers/configurations",
+ "apiVersion": "2023-12-01-preview",
+ "name": "[format('{0}/{1}', parameters('serverName'), 'azure.extensions')]",
+ "properties": {
+ "value": "vector",
+ "source": "user-override"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.Resources/deploymentScripts', 'waitForServerReady')]",
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers', parameters('serverName'))]"
+ ]
+ },
+ {
+ "type": "Microsoft.DBforPostgreSQL/flexibleServers/administrators",
+ "apiVersion": "2022-12-01",
+ "name": "[format('{0}/{1}', parameters('serverName'), parameters('managedIdentityObjectId'))]",
+ "properties": {
+ "principalType": "SERVICEPRINCIPAL",
+ "principalName": "[parameters('managedIdentityObjectName')]",
+ "tenantId": "[subscription().tenantId]"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers/configurations', parameters('serverName'), 'azure.extensions')]",
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers', parameters('serverName'))]"
+ ]
+ },
+ {
+ "condition": "[parameters('allowAllIPsFirewall')]",
+ "type": "Microsoft.DBforPostgreSQL/flexibleServers/firewallRules",
+ "apiVersion": "2023-12-01-preview",
+ "name": "[format('{0}/{1}', parameters('serverName'), 'allow-all-IPs')]",
+ "properties": {
+ "startIpAddress": "0.0.0.0",
+ "endIpAddress": "255.255.255.255"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers/administrators', parameters('serverName'), parameters('managedIdentityObjectId'))]",
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers', parameters('serverName'))]"
+ ]
+ },
+ {
+ "condition": "[parameters('allowAzureIPsFirewall')]",
+ "type": "Microsoft.DBforPostgreSQL/flexibleServers/firewallRules",
+ "apiVersion": "2023-12-01-preview",
+ "name": "[format('{0}/{1}', parameters('serverName'), 'allow-all-azure-internal-IPs')]",
+ "properties": {
+ "startIpAddress": "0.0.0.0",
+ "endIpAddress": "0.0.0.0"
+ },
+ "dependsOn": [
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers/administrators', parameters('serverName'), parameters('managedIdentityObjectId'))]",
+ "[resourceId('Microsoft.DBforPostgreSQL/flexibleServers', parameters('serverName'))]"
+ ]
+ }
+ ],
+ "outputs": {
+ "postgresDbOutput": {
+ "type": "object",
+ "value": {
+ "postgresSQLName": "[parameters('serverName')]",
+ "postgreSQLServerName": "[format('{0}.postgres.database.azure.com', parameters('serverName'))]",
+ "postgreSQLDatabaseName": "postgres",
+ "postgreSQLDbUser": "[parameters('administratorLogin')]",
+ "sslMode": "Require"
+ }
+ }
+ }
+ }
+ },
+ "dependsOn": [
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity')]",
+ "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]"
+ ]
+ },
{
"condition": "[or(parameters('useKeyVault'), equals(parameters('authType'), 'rbac'))]",
"type": "Microsoft.Resources/deployments",
@@ -866,7 +1200,8 @@
},
"principalId": {
"value": "[parameters('principalId')]"
- }
+ },
+ "managedIdentityObjectId": "[if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity'), '2022-09-01').outputs.managedIdentityOutput.value.objectId), createObject('value', ''))]"
},
"template": {
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
@@ -874,8 +1209,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "13364147767022226969"
+ "version": "0.32.4.45862",
+ "templateHash": "8917459410228534148"
},
"description": "Creates an Azure Key Vault."
},
@@ -891,6 +1226,10 @@
"type": "object",
"defaultValue": {}
},
+ "managedIdentityObjectId": {
+ "type": "string",
+ "defaultValue": ""
+ },
"principalId": {
"type": "string",
"defaultValue": ""
@@ -909,7 +1248,7 @@
"family": "A",
"name": "standard"
},
- "accessPolicies": "[if(not(empty(parameters('principalId'))), createArray(createObject('objectId', parameters('principalId'), 'permissions', createObject('secrets', createArray('get', 'list')), 'tenantId', subscription().tenantId)), createArray())]"
+ "accessPolicies": "[concat(if(not(equals(parameters('managedIdentityObjectId'), '')), createArray(createObject('objectId', parameters('managedIdentityObjectId'), 'permissions', createObject('keys', createArray('get', 'list'), 'secrets', createArray('get', 'list')), 'tenantId', subscription().tenantId)), createArray()), if(not(equals(parameters('principalId'), '')), createArray(createObject('objectId', parameters('principalId'), 'permissions', createObject('keys', createArray('get', 'list'), 'secrets', createArray('get', 'list')), 'tenantId', subscription().tenantId)), createArray()))]"
}
}
],
@@ -930,6 +1269,7 @@
}
},
"dependsOn": [
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]"
]
},
@@ -971,8 +1311,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5846053745240336221"
+ "version": "0.32.4.45862",
+ "templateHash": "5038087255133909729"
},
"description": "Creates an Azure Cognitive Services instance."
},
@@ -1130,8 +1470,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5846053745240336221"
+ "version": "0.32.4.45862",
+ "templateHash": "5038087255133909729"
},
"description": "Creates an Azure Cognitive Services instance."
},
@@ -1283,8 +1623,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -1354,8 +1694,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -1425,8 +1765,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -1496,8 +1836,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -1571,8 +1911,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5846053745240336221"
+ "version": "0.32.4.45862",
+ "templateHash": "5038087255133909729"
},
"description": "Creates an Azure Cognitive Services instance."
},
@@ -1730,9 +2070,10 @@
"value": "[parameters('speechServiceName')]"
},
"computerVisionName": "[if(parameters('useAdvancedImageProcessing'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.name.value), createObject('value', ''))]",
- "cosmosAccountName": {
- "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName]"
- },
+ "cosmosAccountName": "[if(equals(parameters('databaseType'), 'CosmosDB'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName), createObject('value', ''))]",
+ "postgresServerName": "[if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName), createObject('value', ''))]",
+ "postgresDatabaseName": "[if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('value', 'postgres'), createObject('value', ''))]",
+ "postgresDatabaseAdminUserName": "[if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDbUser), createObject('value', ''))]",
"rgName": {
"value": "[variables('rgName')]"
}
@@ -1743,8 +2084,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9526483378278704058"
+ "version": "0.32.4.45862",
+ "templateHash": "70372532799191179"
}
},
"parameters": {
@@ -1784,6 +2125,22 @@
"type": "string",
"defaultValue": ""
},
+ "postgresServerName": {
+ "type": "string",
+ "defaultValue": ""
+ },
+ "postgresDatabaseName": {
+ "type": "string",
+ "defaultValue": "postgres"
+ },
+ "postgresInfoName": {
+ "type": "string",
+ "defaultValue": "AZURE-POSTGRESQL-INFO"
+ },
+ "postgresDatabaseAdminUserName": {
+ "type": "string",
+ "defaultValue": ""
+ },
"storageAccountKeyName": {
"type": "string",
"defaultValue": "AZURE-STORAGE-ACCOUNT-KEY"
@@ -1880,11 +2237,21 @@
}
},
{
+ "condition": "[not(equals(parameters('postgresServerName'), ''))]",
+ "type": "Microsoft.KeyVault/vaults/secrets",
+ "apiVersion": "2022-07-01",
+ "name": "[format('{0}/{1}', parameters('keyVaultName'), parameters('postgresInfoName'))]",
+ "properties": {
+ "value": "[if(not(equals(parameters('postgresServerName'), '')), string(createObject('user', parameters('postgresDatabaseAdminUserName'), 'dbname', parameters('postgresDatabaseName'), 'host', parameters('postgresServerName'))), '')]"
+ }
+ },
+ {
+ "condition": "[not(equals(parameters('cosmosAccountName'), ''))]",
"type": "Microsoft.KeyVault/vaults/secrets",
"apiVersion": "2022-07-01",
"name": "[format('{0}/{1}', parameters('keyVaultName'), parameters('cosmosAccountKeyName'))]",
"properties": {
- "value": "[listKeys(resourceId(subscription().subscriptionId, parameters('rgName'), 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosAccountName')), '2022-08-15').primaryMasterKey]"
+ "value": "[if(not(equals(parameters('cosmosAccountName'), '')), listKeys(resourceId(subscription().subscriptionId, parameters('rgName'), 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosAccountName')), '2022-08-15').primaryMasterKey, '')]"
}
}
],
@@ -1919,7 +2286,11 @@
},
"COSMOS_ACCOUNT_KEY_NAME": {
"type": "string",
- "value": "[parameters('cosmosAccountKeyName')]"
+ "value": "[if(not(equals(parameters('cosmosAccountName'), '')), parameters('cosmosAccountKeyName'), '')]"
+ },
+ "POSTGRESQL_INFO_NAME": {
+ "type": "string",
+ "value": "[if(not(equals(parameters('postgresServerName'), '')), parameters('postgresInfoName'), '')]"
}
}
}
@@ -1930,6 +2301,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('storageAccountName'))]"
@@ -1977,8 +2349,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "12402174270479558945"
+ "version": "0.32.4.45862",
+ "templateHash": "11105223970664406813"
},
"description": "Creates an Azure AI Search instance."
},
@@ -2146,8 +2518,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "11168587044178660695"
+ "version": "0.32.4.45862",
+ "templateHash": "18435750249773494638"
},
"description": "Creates an Azure App Service plan."
},
@@ -2260,14 +2632,22 @@
"value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName')), '2022-09-01').outputs.name.value]"
},
"computerVisionName": "[if(parameters('useAdvancedImageProcessing'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.name.value), createObject('value', ''))]",
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
"speechKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SPEECH_KEY_NAME.value), createObject('value', ''))]",
"computerVisionKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COMPUTER_VISION_KEY_NAME.value), createObject('value', ''))]",
- "cosmosDBKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COSMOS_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
+ "cosmosDBKeyName": "[if(and(equals(parameters('databaseType'), 'CosmosDB'), parameters('useKeyVault')), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COSMOS_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
+ "postgresInfoName": "[if(and(equals(parameters('databaseType'), 'PostgreSQL'), parameters('useKeyVault')), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.POSTGRESQL_INFO_NAME.value), createObject('value', ''))]",
"useKeyVault": {
"value": "[parameters('useKeyVault')]"
},
@@ -2276,56 +2656,7 @@
"value": "[parameters('authType')]"
},
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_TEMPERATURE": "[parameters('azureOpenAITemperature')]",
- "AZURE_OPENAI_TOP_P": "[parameters('azureOpenAITopP')]",
- "AZURE_OPENAI_MAX_TOKENS": "[parameters('azureOpenAIMaxTokens')]",
- "AZURE_OPENAI_STOP_SEQUENCE": "[parameters('azureOpenAIStopSequence')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_OPENAI_STREAM": "[parameters('azureOpenAIStream')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_SEARCH_USE_SEMANTIC_SEARCH": "[parameters('azureSearchUseSemanticSearch')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_CONVERSATIONS_LOG_INDEX": "[parameters('azureSearchConversationLogIndex')]",
- "AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG": "[parameters('azureSearchSemanticSearchConfig')]",
- "AZURE_SEARCH_INDEX_IS_PRECHUNKED": "[parameters('azureSearchIndexIsPrechunked')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]",
- "AZURE_SEARCH_ENABLE_IN_DOMAIN": "[parameters('azureSearchEnableInDomain')]",
- "AZURE_SEARCH_FILENAME_COLUMN": "[parameters('azureSearchFilenameColumn')]",
- "AZURE_SEARCH_FILTER": "[parameters('azureSearchFilter')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "AZURE_SEARCH_URL_COLUMN": "[parameters('azureSearchUrlColumn')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "AZURE_SPEECH_SERVICE_NAME": "[parameters('speechServiceName')]",
- "AZURE_SPEECH_SERVICE_REGION": "[parameters('location')]",
- "AZURE_SPEECH_RECOGNIZER_LANGUAGES": "[parameters('recognizedLanguages')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "ADVANCED_IMAGE_PROCESSING_MAX_IMAGES": "[parameters('advancedImageProcessingMaxImages')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "CONVERSATION_FLOW": "[parameters('conversationFlow')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "AZURE_COSMOSDB_INFO": "[string(createObject('accountName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'databaseName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'containerName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName))]",
- "AZURE_COSMOSDB_ENABLE_FEEDBACK": true,
- "CHAT_HISTORY_ENABLED": "[parameters('chatHistoryEnabled')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', 
parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_INFO', string(createObject('accountName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, ''), 'databaseName', if(equals(parameters('databaseType'), 'CosmosDB'), 
reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, ''), 'containerName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, ''))), 'AZURE_COSMOSDB_ENABLE_FEEDBACK', true()), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', parameters('websiteName')))), createObject())))]"
}
},
"template": {
@@ -2334,8 +2665,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9347651394814311894"
+ "version": "0.32.4.45862",
+ "templateHash": "17484679921003627067"
}
},
"parameters": {
@@ -2416,11 +2747,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -2455,9 +2786,17 @@
"type": "string",
"defaultValue": ""
},
+ "databaseType": {
+ "type": "string",
+ "defaultValue": "CosmosDB"
+ },
"cosmosDBKeyName": {
"type": "string",
"defaultValue": ""
+ },
+ "postgresInfoName": {
+ "type": "string",
+ "defaultValue": ""
}
},
"resources": [
@@ -2491,7 +2830,7 @@
"value": "[parameters('appServicePlanId')]"
},
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('computerVisionName')), '2023-05-01').key1), 'AZURE_COSMOSDB_ACCOUNT_KEY', if(or(parameters('useKeyVault'), equals(parameters('cosmosDBKeyName'), '')), parameters('cosmosDBKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosDBKeyName')), '2022-08-15').primaryMasterKey)))]"
+ "value": "[union(parameters('appSettings'), union(if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_KEY', if(or(parameters('useKeyVault'), equals(parameters('cosmosDBKeyName'), '')), parameters('cosmosDBKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosDBKeyName')), '2022-08-15').primaryMasterKey)), createObject()), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1))))]"
},
"keyVaultName": {
"value": "[parameters('keyVaultName')]"
@@ -2508,6 +2847,9 @@
"scmDoBuildDuringDeployment": "[if(parameters('useDocker'), createObject('value', false()), createObject('value', true()))]",
"healthCheckPath": {
"value": "[parameters('healthCheckPath')]"
+ },
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
}
},
"template": {
@@ -2516,8 +2858,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -2743,8 +3085,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -2821,8 +3163,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -2890,8 +3232,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -2959,8 +3301,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -3028,8 +3370,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -3094,8 +3436,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -3143,6 +3485,7 @@
]
},
{
+ "condition": "[equals(parameters('databaseType'), 'CosmosDB')]",
"type": "Microsoft.Resources/deployments",
"apiVersion": "2022-09-01",
"name": "[format('cosmos-sql-user-role-{0}', format('{0}-app-module', parameters('name')))]",
@@ -3168,8 +3511,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "8033637033572984239"
+ "version": "0.32.4.45862",
+ "templateHash": "2813064152180428298"
},
"description": "Creates a SQL role assignment under an Azure Cosmos DB account."
},
@@ -3229,6 +3572,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -3288,14 +3632,22 @@
"value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName')), '2022-09-01').outputs.name.value]"
},
"computerVisionName": "[if(parameters('useAdvancedImageProcessing'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.name.value), createObject('value', ''))]",
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"computerVisionKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COMPUTER_VISION_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
"speechKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SPEECH_KEY_NAME.value), createObject('value', ''))]",
- "cosmosDBKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COSMOS_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
+ "cosmosDBKeyName": "[if(and(equals(parameters('databaseType'), 'CosmosDB'), parameters('useKeyVault')), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COSMOS_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
+ "postgresInfoName": "[if(and(equals(parameters('databaseType'), 'PostgreSQL'), parameters('useKeyVault')), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.POSTGRESQL_INFO_NAME.value), createObject('value', ''))]",
"useKeyVault": {
"value": "[parameters('useKeyVault')]"
},
@@ -3304,56 +3656,7 @@
"value": "[parameters('authType')]"
},
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_TEMPERATURE": "[parameters('azureOpenAITemperature')]",
- "AZURE_OPENAI_TOP_P": "[parameters('azureOpenAITopP')]",
- "AZURE_OPENAI_MAX_TOKENS": "[parameters('azureOpenAIMaxTokens')]",
- "AZURE_OPENAI_STOP_SEQUENCE": "[parameters('azureOpenAIStopSequence')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_OPENAI_STREAM": "[parameters('azureOpenAIStream')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_SEARCH_USE_SEMANTIC_SEARCH": "[parameters('azureSearchUseSemanticSearch')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_CONVERSATIONS_LOG_INDEX": "[parameters('azureSearchConversationLogIndex')]",
- "AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG": "[parameters('azureSearchSemanticSearchConfig')]",
- "AZURE_SEARCH_INDEX_IS_PRECHUNKED": "[parameters('azureSearchIndexIsPrechunked')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]",
- "AZURE_SEARCH_ENABLE_IN_DOMAIN": "[parameters('azureSearchEnableInDomain')]",
- "AZURE_SEARCH_FILENAME_COLUMN": "[parameters('azureSearchFilenameColumn')]",
- "AZURE_SEARCH_FILTER": "[parameters('azureSearchFilter')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "AZURE_SEARCH_URL_COLUMN": "[parameters('azureSearchUrlColumn')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "AZURE_SPEECH_SERVICE_NAME": "[parameters('speechServiceName')]",
- "AZURE_SPEECH_SERVICE_REGION": "[parameters('location')]",
- "AZURE_SPEECH_RECOGNIZER_LANGUAGES": "[parameters('recognizedLanguages')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "ADVANCED_IMAGE_PROCESSING_MAX_IMAGES": "[parameters('advancedImageProcessingMaxImages')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "CONVERSATION_FLOW": "[parameters('conversationFlow')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "AZURE_COSMOSDB_INFO": "[string(createObject('accountName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'databaseName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'containerName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName))]",
- "AZURE_COSMOSDB_ENABLE_FEEDBACK": true,
- "CHAT_HISTORY_ENABLED": "[parameters('chatHistoryEnabled')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_CONVERSATIONS_LOG_INDEX', parameters('azureSearchConversationLogIndex'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', 
parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SPEECH_SERVICE_NAME', parameters('speechServiceName'), 'AZURE_SPEECH_SERVICE_REGION', parameters('location'), 'AZURE_SPEECH_RECOGNIZER_LANGUAGES', parameters('recognizedLanguages'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'ADVANCED_IMAGE_PROCESSING_MAX_IMAGES', parameters('advancedImageProcessingMaxImages'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'CONVERSATION_FLOW', parameters('conversationFlow'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_INFO', string(createObject('accountName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, ''), 'databaseName', if(equals(parameters('databaseType'), 'CosmosDB'), 
reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, ''), 'containerName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, ''))), 'AZURE_COSMOSDB_ENABLE_FEEDBACK', true()), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', format('{0}-docker', parameters('websiteName'))))), createObject())))]"
}
},
"template": {
@@ -3362,8 +3665,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9347651394814311894"
+ "version": "0.32.4.45862",
+ "templateHash": "17484679921003627067"
}
},
"parameters": {
@@ -3444,11 +3747,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -3483,9 +3786,17 @@
"type": "string",
"defaultValue": ""
},
+ "databaseType": {
+ "type": "string",
+ "defaultValue": "CosmosDB"
+ },
"cosmosDBKeyName": {
"type": "string",
"defaultValue": ""
+ },
+ "postgresInfoName": {
+ "type": "string",
+ "defaultValue": ""
}
},
"resources": [
@@ -3519,7 +3830,7 @@
"value": "[parameters('appServicePlanId')]"
},
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('computerVisionName')), '2023-05-01').key1), 'AZURE_COSMOSDB_ACCOUNT_KEY', if(or(parameters('useKeyVault'), equals(parameters('cosmosDBKeyName'), '')), parameters('cosmosDBKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosDBKeyName')), '2022-08-15').primaryMasterKey)))]"
+ "value": "[union(parameters('appSettings'), union(if(equals(parameters('databaseType'), 'CosmosDB'), createObject('AZURE_COSMOSDB_ACCOUNT_KEY', if(or(parameters('useKeyVault'), equals(parameters('cosmosDBKeyName'), '')), parameters('cosmosDBKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.DocumentDB/databaseAccounts', parameters('cosmosDBKeyName')), '2022-08-15').primaryMasterKey)), createObject()), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1))))]"
},
"keyVaultName": {
"value": "[parameters('keyVaultName')]"
@@ -3536,6 +3847,9 @@
"scmDoBuildDuringDeployment": "[if(parameters('useDocker'), createObject('value', false()), createObject('value', true()))]",
"healthCheckPath": {
"value": "[parameters('healthCheckPath')]"
+ },
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
}
},
"template": {
@@ -3544,8 +3858,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -3771,8 +4085,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -3849,8 +4163,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -3918,8 +4232,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -3987,8 +4301,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -4056,8 +4370,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -4122,8 +4436,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -4171,6 +4485,7 @@
]
},
{
+ "condition": "[equals(parameters('databaseType'), 'CosmosDB')]",
"type": "Microsoft.Resources/deployments",
"apiVersion": "2022-09-01",
"name": "[format('cosmos-sql-user-role-{0}', format('{0}-app-module', parameters('name')))]",
@@ -4196,8 +4511,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "8033637033572984239"
+ "version": "0.32.4.45862",
+ "templateHash": "2813064152180428298"
},
"description": "Creates a SQL role assignment under an Azure Cosmos DB account."
},
@@ -4257,6 +4572,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -4317,8 +4633,12 @@
},
"computerVisionName": "[if(parameters('useAdvancedImageProcessing'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.name.value), createObject('value', ''))]",
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"computerVisionKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.COMPUTER_VISION_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
@@ -4330,54 +4650,11 @@
"authType": {
"value": "[parameters('authType')]"
},
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_TEMPERATURE": "[parameters('azureOpenAITemperature')]",
- "AZURE_OPENAI_TOP_P": "[parameters('azureOpenAITopP')]",
- "AZURE_OPENAI_MAX_TOKENS": "[parameters('azureOpenAIMaxTokens')]",
- "AZURE_OPENAI_STOP_SEQUENCE": "[parameters('azureOpenAIStopSequence')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_OPENAI_STREAM": "[parameters('azureOpenAIStream')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_USE_SEMANTIC_SEARCH": "[parameters('azureSearchUseSemanticSearch')]",
- "AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG": "[parameters('azureSearchSemanticSearchConfig')]",
- "AZURE_SEARCH_INDEX_IS_PRECHUNKED": "[parameters('azureSearchIndexIsPrechunked')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]",
- "AZURE_SEARCH_ENABLE_IN_DOMAIN": "[parameters('azureSearchEnableInDomain')]",
- "AZURE_SEARCH_FILENAME_COLUMN": "[parameters('azureSearchFilenameColumn')]",
- "AZURE_SEARCH_FILTER": "[parameters('azureSearchFilter')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "AZURE_SEARCH_URL_COLUMN": "[parameters('azureSearchUrlColumn')]",
- "AZURE_SEARCH_DATASOURCE_NAME": "[parameters('azureSearchDatasource')]",
- "AZURE_SEARCH_INDEXER_NAME": "[parameters('azureSearchIndexer')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "BACKEND_URL": "[format('https://{0}.azurewebsites.net', parameters('functionName'))]",
- "DOCUMENT_PROCESSING_QUEUE_NAME": "[variables('queueName')]",
- "FUNCTION_KEY": "[variables('clientKey')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "CHAT_HISTORY_ENABLED": "[parameters('chatHistoryEnabled')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', 
parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), 
'2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', parameters('adminWebsiteName')))), createObject()))]"
}
},
"template": {
@@ -4386,8 +4663,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "16426772879193976216"
+ "version": "0.32.4.45862",
+ "templateHash": "8759422582622331901"
}
},
"parameters": {
@@ -4468,11 +4745,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -4502,6 +4779,10 @@
"useDocker": {
"type": "bool",
"defaultValue": "[not(equals(parameters('dockerFullImageName'), ''))]"
+ },
+ "databaseType": {
+ "type": "string",
+ "defaultValue": "CosmosDB"
}
},
"resources": [
@@ -4547,8 +4828,11 @@
"appServicePlanId": {
"value": "[parameters('appServicePlanId')]"
},
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
+ },
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('computerVisionName')), '2023-05-01').key1)))]"
+ "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), 
parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
}
},
"template": {
@@ -4557,8 +4841,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -4784,8 +5068,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -4862,8 +5146,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -4931,8 +5215,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -5000,8 +5284,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -5069,8 +5353,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -5135,8 +5419,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -5208,6 +5492,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -5265,8 +5550,12 @@
},
"computerVisionName": "[if(parameters('useAdvancedImageProcessing'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.name.value), createObject('value', ''))]",
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
"speechKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SPEECH_KEY_NAME.value), createObject('value', ''))]",
@@ -5278,54 +5567,11 @@
"authType": {
"value": "[parameters('authType')]"
},
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_TEMPERATURE": "[parameters('azureOpenAITemperature')]",
- "AZURE_OPENAI_TOP_P": "[parameters('azureOpenAITopP')]",
- "AZURE_OPENAI_MAX_TOKENS": "[parameters('azureOpenAIMaxTokens')]",
- "AZURE_OPENAI_STOP_SEQUENCE": "[parameters('azureOpenAIStopSequence')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_OPENAI_STREAM": "[parameters('azureOpenAIStream')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_USE_SEMANTIC_SEARCH": "[parameters('azureSearchUseSemanticSearch')]",
- "AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG": "[parameters('azureSearchSemanticSearchConfig')]",
- "AZURE_SEARCH_INDEX_IS_PRECHUNKED": "[parameters('azureSearchIndexIsPrechunked')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]",
- "AZURE_SEARCH_ENABLE_IN_DOMAIN": "[parameters('azureSearchEnableInDomain')]",
- "AZURE_SEARCH_FILENAME_COLUMN": "[parameters('azureSearchFilenameColumn')]",
- "AZURE_SEARCH_FILTER": "[parameters('azureSearchFilter')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "AZURE_SEARCH_URL_COLUMN": "[parameters('azureSearchUrlColumn')]",
- "AZURE_SEARCH_DATASOURCE_NAME": "[parameters('azureSearchDatasource')]",
- "AZURE_SEARCH_INDEXER_NAME": "[parameters('azureSearchIndexer')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "BACKEND_URL": "[format('https://{0}-docker.azurewebsites.net', parameters('functionName'))]",
- "DOCUMENT_PROCESSING_QUEUE_NAME": "[variables('queueName')]",
- "FUNCTION_KEY": "[variables('clientKey')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "CHAT_HISTORY_ENABLED": "[parameters('chatHistoryEnabled')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_TEMPERATURE', parameters('azureOpenAITemperature'), 'AZURE_OPENAI_TOP_P', parameters('azureOpenAITopP'), 'AZURE_OPENAI_MAX_TOKENS', parameters('azureOpenAIMaxTokens'), 'AZURE_OPENAI_STOP_SEQUENCE', parameters('azureOpenAIStopSequence'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_OPENAI_STREAM', parameters('azureOpenAIStream'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_USE_SEMANTIC_SEARCH', parameters('azureSearchUseSemanticSearch'), 'AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG', parameters('azureSearchSemanticSearchConfig'), 'AZURE_SEARCH_INDEX_IS_PRECHUNKED', parameters('azureSearchIndexIsPrechunked'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'AZURE_SEARCH_ENABLE_IN_DOMAIN', 
parameters('azureSearchEnableInDomain'), 'AZURE_SEARCH_FILENAME_COLUMN', parameters('azureSearchFilenameColumn'), 'AZURE_SEARCH_FILTER', parameters('azureSearchFilter'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', parameters('azureSearchOffsetColumn'), 'AZURE_SEARCH_URL_COLUMN', parameters('azureSearchUrlColumn'), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'BACKEND_URL', format('https://{0}-docker.azurewebsites.net', parameters('functionName')), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'FUNCTION_KEY', variables('clientKey'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 
'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', format('{0}-docker', parameters('adminWebsiteName'))))), createObject()))]"
}
},
"template": {
@@ -5334,8 +5580,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "16426772879193976216"
+ "version": "0.32.4.45862",
+ "templateHash": "8759422582622331901"
}
},
"parameters": {
@@ -5416,11 +5662,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -5450,6 +5696,10 @@
"useDocker": {
"type": "bool",
"defaultValue": "[not(equals(parameters('dockerFullImageName'), ''))]"
+ },
+ "databaseType": {
+ "type": "string",
+ "defaultValue": "CosmosDB"
}
},
"resources": [
@@ -5495,8 +5745,11 @@
"appServicePlanId": {
"value": "[parameters('appServicePlanId')]"
},
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
+ },
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', 
parameters('computerVisionName')), '2023-05-01').key1)))]"
+ "value": "[union(parameters('appSettings'), createObject('AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), 
parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
}
},
"template": {
@@ -5505,8 +5758,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -5732,8 +5985,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -5810,8 +6063,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -5879,8 +6132,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -5948,8 +6201,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -6017,8 +6270,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -6083,8 +6336,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -6156,6 +6409,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -6198,8 +6452,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "8473455776229346647"
+ "version": "0.32.4.45862",
+ "templateHash": "10190065828144265343"
},
"description": "Creates an Application Insights instance and a Log Analytics workspace."
},
@@ -6250,8 +6504,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "15449976264810996474"
+ "version": "0.32.4.45862",
+ "templateHash": "9506675660522824519"
},
"description": "Creates a Log Analytics workspace."
},
@@ -6331,8 +6585,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "16358460762600875186"
+ "version": "0.32.4.45862",
+ "templateHash": "1166184924473734792"
},
"description": "Creates an Application Insights instance based on an existing Log Analytics workspace."
},
@@ -6396,8 +6650,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "1003060957409338499"
+ "version": "0.32.4.45862",
+ "templateHash": "12126236527601344203"
},
"description": "Creates a dashboard for an Application Insights instance."
},
@@ -7735,8 +7989,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "12632171944221294691"
+ "version": "0.32.4.45862",
+ "templateHash": "9194393038824315813"
}
},
"parameters": {
@@ -7818,8 +8072,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "10154909114565024920"
+ "version": "0.32.4.45862",
+ "templateHash": "12403631824314710916"
}
},
"parameters": {
@@ -7960,8 +8214,12 @@
"value": "[variables('clientKey')]"
},
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
"speechKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SPEECH_KEY_NAME.value), createObject('value', ''))]",
@@ -7973,39 +8231,11 @@
"authType": {
"value": "[parameters('authType')]"
},
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_DATASOURCE_NAME": "[parameters('azureSearchDatasource')]",
- "AZURE_SEARCH_INDEXER_NAME": "[parameters('azureSearchIndexer')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "DOCUMENT_PROCESSING_QUEUE_NAME": "[variables('queueName')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', 
parameters('azureSearchOffsetColumn'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', parameters('functionName')))), createObject()))]"
}
},
"template": {
@@ -8014,8 +8244,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9410273585702095132"
+ "version": "0.32.4.45862",
+ "templateHash": "3062598733364993743"
}
},
"parameters": {
@@ -8091,11 +8321,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -8122,9 +8352,8 @@
"type": "string",
"defaultValue": ""
},
- "cosmosDBKeyName": {
- "type": "string",
- "defaultValue": ""
+ "databaseType": {
+ "type": "string"
}
},
"resources": [
@@ -8200,8 +8429,11 @@
"useKeyVault": {
"value": "[parameters('useKeyVault')]"
},
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
+ },
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('WEBSITES_ENABLE_APP_SERVICE_STORAGE', 'false', 'AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, 
resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
+ "value": "[union(parameters('appSettings'), createObject('WEBSITES_ENABLE_APP_SERVICE_STORAGE', 'false', 'AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), 
equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
}
},
"template": {
@@ -8210,8 +8442,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "7133078529690530611"
+ "version": "0.32.4.45862",
+ "templateHash": "5188081085127808194"
},
"description": "Creates an Azure Function in an existing Azure App Service plan."
},
@@ -8421,8 +8653,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -8648,8 +8880,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -8725,8 +8957,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -8812,8 +9044,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -8881,8 +9113,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -8950,8 +9182,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -9019,8 +9251,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -9088,8 +9320,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -9154,8 +9386,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -9223,6 +9455,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -9283,8 +9516,12 @@
"value": "[variables('clientKey')]"
},
"openAIKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.OPENAI_KEY_NAME.value), createObject('value', ''))]",
- "storageAccountKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value), createObject('value', ''))]",
- "formRecognizerKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value), createObject('value', ''))]",
+ "azureBlobStorageInfo": {
+ "value": "[string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY')))]"
+ },
+ "azureFormRecognizerInfo": {
+ "value": "[string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY')))]"
+ },
"searchKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SEARCH_KEY_NAME.value), createObject('value', ''))]",
"contentSafetyKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value), createObject('value', ''))]",
"speechKeyName": "[if(parameters('useKeyVault'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.SPEECH_KEY_NAME.value), createObject('value', ''))]",
@@ -9296,39 +9533,11 @@
"authType": {
"value": "[parameters('authType')]"
},
+ "databaseType": {
+ "value": "[parameters('databaseType')]"
+ },
"appSettings": {
- "value": {
- "AZURE_BLOB_ACCOUNT_NAME": "[parameters('storageAccountName')]",
- "AZURE_BLOB_CONTAINER_NAME": "[variables('blobContainerName')]",
- "AZURE_COMPUTER_VISION_ENDPOINT": "[if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, '')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION": "[parameters('computerVisionVectorizeImageApiVersion')]",
- "AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION": "[parameters('computerVisionVectorizeImageModelVersion')]",
- "AZURE_CONTENT_SAFETY_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_FORM_RECOGNIZER_ENDPOINT": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]",
- "AZURE_OPENAI_MODEL_INFO": "[variables('azureOpenAIModelInfo')]",
- "AZURE_OPENAI_EMBEDDING_MODEL_INFO": "[variables('azureOpenAIEmbeddingModelInfo')]",
- "AZURE_OPENAI_RESOURCE": "[parameters('azureOpenAIResourceName')]",
- "AZURE_OPENAI_API_VERSION": "[parameters('azureOpenAIApiVersion')]",
- "AZURE_SEARCH_INDEX": "[parameters('azureSearchIndex')]",
- "AZURE_SEARCH_SERVICE": "[format('https://{0}.search.windows.net', parameters('azureAISearchName'))]",
- "AZURE_SEARCH_DATASOURCE_NAME": "[parameters('azureSearchDatasource')]",
- "AZURE_SEARCH_INDEXER_NAME": "[parameters('azureSearchIndexer')]",
- "AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION": "[parameters('azureSearchUseIntegratedVectorization')]",
- "AZURE_SEARCH_FIELDS_ID": "[parameters('azureSearchFieldId')]",
- "AZURE_SEARCH_CONTENT_COLUMN": "[parameters('azureSearchContentColumn')]",
- "AZURE_SEARCH_CONTENT_VECTOR_COLUMN": "[parameters('azureSearchVectorColumn')]",
- "AZURE_SEARCH_TITLE_COLUMN": "[parameters('azureSearchTitleColumn')]",
- "AZURE_SEARCH_FIELDS_METADATA": "[parameters('azureSearchFieldsMetadata')]",
- "AZURE_SEARCH_SOURCE_COLUMN": "[parameters('azureSearchSourceColumn')]",
- "AZURE_SEARCH_CHUNK_COLUMN": "[parameters('azureSearchChunkColumn')]",
- "AZURE_SEARCH_OFFSET_COLUMN": "[parameters('azureSearchOffsetColumn')]",
- "USE_ADVANCED_IMAGE_PROCESSING": "[parameters('useAdvancedImageProcessing')]",
- "DOCUMENT_PROCESSING_QUEUE_NAME": "[variables('queueName')]",
- "ORCHESTRATION_STRATEGY": "[parameters('orchestrationStrategy')]",
- "LOGLEVEL": "[parameters('logLevel')]",
- "AZURE_OPENAI_SYSTEM_MESSAGE": "[parameters('azureOpenAISystemMessage')]",
- "AZURE_SEARCH_TOP_K": "[parameters('azureSearchTopK')]"
- }
+ "value": "[union(createObject('AZURE_COMPUTER_VISION_ENDPOINT', if(parameters('useAdvancedImageProcessing'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'computerVision'), '2022-09-01').outputs.endpoint.value, ''), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_API_VERSION', parameters('computerVisionVectorizeImageApiVersion'), 'AZURE_COMPUTER_VISION_VECTORIZE_IMAGE_MODEL_VERSION', parameters('computerVisionVectorizeImageModelVersion'), 'AZURE_CONTENT_SAFETY_ENDPOINT', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('contentSafetyName')), '2022-09-01').outputs.endpoint.value, 'AZURE_OPENAI_MODEL_INFO', variables('azureOpenAIModelInfo'), 'AZURE_OPENAI_EMBEDDING_MODEL_INFO', variables('azureOpenAIEmbeddingModelInfo'), 'AZURE_OPENAI_RESOURCE', parameters('azureOpenAIResourceName'), 'AZURE_OPENAI_API_VERSION', parameters('azureOpenAIApiVersion'), 'AZURE_SEARCH_INDEX', parameters('azureSearchIndex'), 'AZURE_SEARCH_SERVICE', format('https://{0}.search.windows.net', parameters('azureAISearchName')), 'AZURE_SEARCH_DATASOURCE_NAME', parameters('azureSearchDatasource'), 'AZURE_SEARCH_INDEXER_NAME', parameters('azureSearchIndexer'), 'AZURE_SEARCH_USE_INTEGRATED_VECTORIZATION', parameters('azureSearchUseIntegratedVectorization'), 'AZURE_SEARCH_FIELDS_ID', parameters('azureSearchFieldId'), 'AZURE_SEARCH_CONTENT_COLUMN', parameters('azureSearchContentColumn'), 'AZURE_SEARCH_CONTENT_VECTOR_COLUMN', parameters('azureSearchVectorColumn'), 'AZURE_SEARCH_TITLE_COLUMN', parameters('azureSearchTitleColumn'), 'AZURE_SEARCH_FIELDS_METADATA', parameters('azureSearchFieldsMetadata'), 'AZURE_SEARCH_SOURCE_COLUMN', parameters('azureSearchSourceColumn'), 'AZURE_SEARCH_CHUNK_COLUMN', parameters('azureSearchChunkColumn'), 'AZURE_SEARCH_OFFSET_COLUMN', 
parameters('azureSearchOffsetColumn'), 'USE_ADVANCED_IMAGE_PROCESSING', parameters('useAdvancedImageProcessing'), 'DOCUMENT_PROCESSING_QUEUE_NAME', variables('queueName'), 'ORCHESTRATION_STRATEGY', parameters('orchestrationStrategy'), 'LOGLEVEL', parameters('logLevel'), 'AZURE_OPENAI_SYSTEM_MESSAGE', parameters('azureOpenAISystemMessage'), 'AZURE_SEARCH_TOP_K', parameters('azureSearchTopK'), 'DATABASE_TYPE', parameters('databaseType')), if(equals(parameters('databaseType'), 'PostgreSQL'), createObject('AZURE_POSTGRESQL_INFO', string(createObject('host', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, 'dbname', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, 'user', format('{0}-docker', parameters('functionName'))))), createObject()))]"
}
},
"template": {
@@ -9337,8 +9546,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "9410273585702095132"
+ "version": "0.32.4.45862",
+ "templateHash": "3062598733364993743"
}
},
"parameters": {
@@ -9414,11 +9623,11 @@
"type": "string",
"defaultValue": ""
},
- "storageAccountKeyName": {
+ "azureBlobStorageInfo": {
"type": "string",
"defaultValue": ""
},
- "formRecognizerKeyName": {
+ "azureFormRecognizerInfo": {
"type": "string",
"defaultValue": ""
},
@@ -9445,9 +9654,8 @@
"type": "string",
"defaultValue": ""
},
- "cosmosDBKeyName": {
- "type": "string",
- "defaultValue": ""
+ "databaseType": {
+ "type": "string"
}
},
"resources": [
@@ -9523,8 +9731,11 @@
"useKeyVault": {
"value": "[parameters('useKeyVault')]"
},
+ "managedIdentity": {
+ "value": "[or(equals(parameters('databaseType'), 'PostgreSQL'), not(empty(parameters('keyVaultName'))))]"
+ },
"appSettings": {
- "value": "[union(parameters('appSettings'), createObject('WEBSITES_ENABLE_APP_SERVICE_STORAGE', 'false', 'AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_ACCOUNT_KEY', if(parameters('useKeyVault'), parameters('storageAccountKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value), 'AZURE_FORM_RECOGNIZER_KEY', if(parameters('useKeyVault'), parameters('formRecognizerKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, 
resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
+ "value": "[union(parameters('appSettings'), createObject('WEBSITES_ENABLE_APP_SERVICE_STORAGE', 'false', 'AZURE_AUTH_TYPE', parameters('authType'), 'USE_KEY_VAULT', if(parameters('useKeyVault'), parameters('useKeyVault'), ''), 'AZURE_OPENAI_API_KEY', if(parameters('useKeyVault'), parameters('openAIKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('azureOpenAIName')), '2023-05-01').key1), 'AZURE_SEARCH_KEY', if(parameters('useKeyVault'), parameters('searchKeyName'), listAdminKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Search/searchServices', parameters('azureAISearchName')), '2021-04-01-preview').primaryKey), 'AZURE_BLOB_STORAGE_INFO', if(parameters('useKeyVault'), parameters('azureBlobStorageInfo'), replace(parameters('azureBlobStorageInfo'), '$STORAGE_ACCOUNT_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.Storage/storageAccounts', parameters('storageAccountName')), '2021-09-01').keys[0].value)), 'AZURE_FORM_RECOGNIZER_INFO', if(parameters('useKeyVault'), parameters('azureFormRecognizerInfo'), replace(parameters('azureFormRecognizerInfo'), '$FORM_RECOGNIZER_KEY', listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('formRecognizerName')), '2023-05-01').key1)), 'AZURE_CONTENT_SAFETY_KEY', if(parameters('useKeyVault'), parameters('contentSafetyKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('contentSafetyName')), '2023-05-01').key1), 'AZURE_SPEECH_SERVICE_KEY', if(parameters('useKeyVault'), parameters('speechKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('speechServiceName')), '2023-05-01').key1), 'AZURE_COMPUTER_VISION_KEY', if(or(parameters('useKeyVault'), 
equals(parameters('computerVisionName'), '')), parameters('computerVisionKeyName'), listKeys(resourceId(subscription().subscriptionId, resourceGroup().name, 'Microsoft.CognitiveServices/accounts', parameters('computerVisionName')), '2023-05-01').key1)))]"
}
},
"template": {
@@ -9533,8 +9744,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "7133078529690530611"
+ "version": "0.32.4.45862",
+ "templateHash": "5188081085127808194"
},
"description": "Creates an Azure Function in an existing Azure App Service plan."
},
@@ -9744,8 +9955,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14818871229133632920"
+ "version": "0.32.4.45862",
+ "templateHash": "1710823743041736936"
},
"description": "Creates an Azure App Service in an existing Azure App Service plan."
},
@@ -9971,8 +10182,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "3955925289075906039"
+ "version": "0.32.4.45862",
+ "templateHash": "3479291286349558867"
},
"description": "Updates app settings for an Azure App Service."
},
@@ -10048,8 +10259,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10135,8 +10346,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10204,8 +10415,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10273,8 +10484,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10342,8 +10553,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10411,8 +10622,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -10477,8 +10688,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17352167468248267479"
+ "version": "0.32.4.45862",
+ "templateHash": "17848638157182929130"
},
"description": "Assigns an Azure Key Vault access policy."
},
@@ -10546,6 +10757,7 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'monitoring')]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureOpenAIResourceName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
"[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('speechServiceName'))]",
@@ -10583,8 +10795,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5846053745240336221"
+ "version": "0.32.4.45862",
+ "templateHash": "5038087255133909729"
},
"description": "Creates an Azure Cognitive Services instance."
},
@@ -10738,8 +10950,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5846053745240336221"
+ "version": "0.32.4.45862",
+ "templateHash": "5038087255133909729"
},
"description": "Creates an Azure Cognitive Services instance."
},
@@ -10896,8 +11108,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14787323190374281342"
+ "version": "0.32.4.45862",
+ "templateHash": "12571494031452225082"
}
},
"parameters": {
@@ -11029,8 +11241,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "17192989974061212120"
+ "version": "0.32.4.45862",
+ "templateHash": "16347867757057954703"
},
"description": "Creates an Azure storage account."
},
@@ -11257,8 +11469,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -11327,8 +11539,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -11397,8 +11609,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -11467,8 +11679,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "5620801774479515492"
+ "version": "0.32.4.45862",
+ "templateHash": "2541084448726511572"
},
"description": "Creates a role assignment for a service principal."
},
@@ -11553,8 +11765,8 @@
"metadata": {
"_generator": {
"name": "bicep",
- "version": "0.31.34.60546",
- "templateHash": "14309427698097244890"
+ "version": "0.32.4.45862",
+ "templateHash": "2285879213840317610"
}
},
"parameters": {
@@ -11657,6 +11869,121 @@
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('azureAISearchName'))]",
"[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('storageAccountName'))]"
]
+ },
+ {
+ "condition": "[equals(parameters('databaseType'), 'PostgreSQL')]",
+ "type": "Microsoft.Resources/deployments",
+ "apiVersion": "2022-09-01",
+ "name": "deploy_create_table_script",
+ "resourceGroup": "[variables('rgName')]",
+ "properties": {
+ "expressionEvaluationOptions": {
+ "scope": "inner"
+ },
+ "mode": "Incremental",
+ "parameters": {
+ "solutionLocation": {
+ "value": "[parameters('location')]"
+ },
+ "identity": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity'), '2022-09-01').outputs.managedIdentityOutput.value.id]"
+ },
+ "baseUrl": {
+ "value": "[variables('baseUrl')]"
+ },
+ "keyVaultName": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault'), '2022-09-01').outputs.name.value]"
+ },
+ "postgresSqlServerName": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName]"
+ },
+ "webAppPrincipalName": "[if(equals(parameters('hostingModel'), 'code'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('websiteName')), '2022-09-01').outputs.FRONTEND_API_NAME.value), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('websiteName'))), '2022-09-01').outputs.FRONTEND_API_NAME.value))]",
+ "adminAppPrincipalName": "[if(equals(parameters('hostingModel'), 'code'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('adminWebsiteName')), '2022-09-01').outputs.WEBSITE_ADMIN_NAME.value), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('adminWebsiteName'))), '2022-09-01').outputs.WEBSITE_ADMIN_NAME.value))]",
+ "functionAppPrincipalName": "[if(equals(parameters('hostingModel'), 'code'), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('functionName')), '2022-09-01').outputs.functionName.value), createObject('value', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('functionName'))), '2022-09-01').outputs.functionName.value))]",
+ "managedIdentityName": {
+ "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity'), '2022-09-01').outputs.managedIdentityOutput.value.name]"
+ }
+ },
+ "template": {
+ "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "metadata": {
+ "_generator": {
+ "name": "bicep",
+ "version": "0.32.4.45862",
+ "templateHash": "6726225974980028819"
+ }
+ },
+ "parameters": {
+ "solutionLocation": {
+ "type": "string",
+ "metadata": {
+ "description": "Specifies the location for resources."
+ }
+ },
+ "baseUrl": {
+ "type": "string"
+ },
+ "keyVaultName": {
+ "type": "string"
+ },
+ "identity": {
+ "type": "string"
+ },
+ "postgresSqlServerName": {
+ "type": "string"
+ },
+ "webAppPrincipalName": {
+ "type": "string"
+ },
+ "adminAppPrincipalName": {
+ "type": "string"
+ },
+ "managedIdentityName": {
+ "type": "string"
+ },
+ "functionAppPrincipalName": {
+ "type": "string"
+ }
+ },
+ "resources": [
+ {
+ "type": "Microsoft.Resources/deploymentScripts",
+ "apiVersion": "2020-10-01",
+ "name": "create_postgres_table",
+ "kind": "AzureCLI",
+ "location": "[parameters('solutionLocation')]",
+ "identity": {
+ "type": "UserAssigned",
+ "userAssignedIdentities": {
+ "[format('{0}', parameters('identity'))]": {}
+ }
+ },
+ "properties": {
+ "azCliVersion": "2.52.0",
+ "primaryScriptUri": "[format('{0}scripts/run_create_table_script.sh', parameters('baseUrl'))]",
+ "arguments": "[format('{0} {1} {2} {3} {4} {5} {6} {7}', parameters('baseUrl'), parameters('keyVaultName'), resourceGroup().name, parameters('postgresSqlServerName'), parameters('webAppPrincipalName'), parameters('adminAppPrincipalName'), parameters('functionAppPrincipalName'), parameters('managedIdentityName'))]",
+ "timeout": "PT1H",
+ "retentionInterval": "PT1H",
+ "cleanupPreference": "OnSuccess"
+ }
+ }
+ ]
+ }
+ },
+ "dependsOn": [
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('adminWebsiteName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('adminWebsiteName')))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('functionName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('functionName')))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'keyvault')]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_managed_identity')]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql')]",
+ "[subscriptionResourceId('Microsoft.Resources/resourceGroups', variables('rgName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys')]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('websiteName'))]",
+ "[extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', format('{0}-docker', parameters('websiteName')))]"
+ ]
}
],
"outputs": {
@@ -11668,17 +11995,9 @@
"type": "string",
"value": "[parameters('hostingModel')]"
},
- "AZURE_BLOB_CONTAINER_NAME": {
- "type": "string",
- "value": "[variables('blobContainerName')]"
- },
- "AZURE_BLOB_ACCOUNT_NAME": {
+ "AZURE_BLOB_STORAGE_INFO": {
"type": "string",
- "value": "[parameters('storageAccountName')]"
- },
- "AZURE_BLOB_ACCOUNT_KEY": {
- "type": "string",
- "value": "[if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '')]"
+ "value": "[replace(string(createObject('containerName', variables('blobContainerName'), 'accountName', parameters('storageAccountName'), 'accountKey', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.STORAGE_ACCOUNT_KEY_NAME.value, '$STORAGE_ACCOUNT_KEY'))), '$STORAGE_ACCOUNT_KEY', '')]"
},
"AZURE_COMPUTER_VISION_ENDPOINT": {
"type": "string",
@@ -11708,13 +12027,9 @@
"type": "string",
"value": "[if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.CONTENT_SAFETY_KEY_NAME.value, '')]"
},
- "AZURE_FORM_RECOGNIZER_ENDPOINT": {
- "type": "string",
- "value": "[reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value]"
- },
- "AZURE_FORM_RECOGNIZER_KEY": {
+ "AZURE_FORM_RECOGNIZER_INFO": {
"type": "string",
- "value": "[if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '')]"
+ "value": "[replace(string(createObject('endpoint', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', parameters('formRecognizerName')), '2022-09-01').outputs.endpoint.value, 'key', if(parameters('useKeyVault'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'storekeys'), '2022-09-01').outputs.FORM_RECOGNIZER_KEY_NAME.value, '$FORM_RECOGNIZER_KEY'))), '$FORM_RECOGNIZER_KEY', '')]"
},
"AZURE_KEY_VAULT_ENDPOINT": {
"type": "string",
@@ -11910,7 +12225,11 @@
},
"AZURE_COSMOSDB_INFO": {
"type": "string",
- "value": "[string(createObject('accountName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, 'databaseName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, 'containerName', reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName))]"
+ "value": "[string(createObject('accountName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosAccountName, ''), 'databaseName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosDatabaseName, ''), 'containerName', if(equals(parameters('databaseType'), 'CosmosDB'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_cosmos_db'), '2022-09-01').outputs.cosmosOutput.value.cosmosContainerName, '')))]"
+ },
+ "AZURE_POSTGRESQL_INFO": {
+ "type": "string",
+ "value": "[string(createObject('serverName', if(equals(parameters('databaseType'), 'PostgreSQL'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLServerName, ''), 'databaseName', if(equals(parameters('databaseType'), 'PostgreSQL'), reference(extensionResourceId(format('/subscriptions/{0}/resourceGroups/{1}', subscription().subscriptionId, variables('rgName')), 'Microsoft.Resources/deployments', 'deploy_postgres_sql'), '2022-09-01').outputs.postgresDbOutput.value.postgreSQLDatabaseName, ''), 'userName', ''))]"
}
}
}
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index c321375ed..c3042f502 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -355,6 +355,72 @@ files = [
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
+[[package]]
+name = "asyncpg"
+version = "0.30.0"
+description = "An asyncio PostgreSQL driver"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"},
+ {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"},
+ {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"},
+ {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"},
+ {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"},
+ {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"},
+ {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"},
+ {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"},
+ {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"},
+ {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"},
+ {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"},
+ {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"},
+ {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"},
+ {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"},
+ {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"},
+ {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"},
+ {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"},
+ {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"},
+ {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"},
+ {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"},
+ {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"},
+ {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"},
+ {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"},
+ {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"},
+ {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"},
+ {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"},
+ {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"},
+ {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"},
+ {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"},
+ {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"},
+ {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"},
+ {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"},
+ {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"},
+ {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"},
+ {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"},
+ {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"},
+ {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"},
+ {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"},
+ {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"},
+ {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"},
+ {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"},
+ {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"},
+ {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"},
+ {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"},
+ {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"},
+ {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"},
+ {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"},
+ {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"},
+ {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""}
+
+[package.extras]
+docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
+gssauth = ["gssapi", "sspilib"]
+test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"]
+
[[package]]
name = "attrs"
version = "23.2.0"
@@ -4150,6 +4216,20 @@ files = [
[package.dependencies]
ptyprocess = ">=0.5"
+[[package]]
+name = "pgvector"
+version = "0.3.6"
+description = "pgvector support for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pgvector-0.3.6-py3-none-any.whl", hash = "sha256:f6c269b3c110ccb7496bac87202148ed18f34b390a0189c783e351062400a75a"},
+ {file = "pgvector-0.3.6.tar.gz", hash = "sha256:31d01690e6ea26cea8a633cde5f0f55f5b246d9c8292d68efdef8c22ec994ade"},
+]
+
+[package.dependencies]
+numpy = "*"
+
[[package]]
name = "pillow"
version = "10.4.0"
@@ -4553,6 +4633,82 @@ files = [
[package.extras]
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
+ {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
+ {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
+ {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
+ {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
+ {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
+ {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
+]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -6627,4 +6783,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "826226f49f211954e1a565360e48f0e655807b7e7f370780bd1fed30f2bccac4"
+content-hash = "2f68e50e5cc37578d95c47708f24e8b1ee8f3c2d20481d14514a8aead0eaf078"
diff --git a/pyproject.toml b/pyproject.toml
index c99e50d78..10a97162f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,6 +40,9 @@ jsonschema = "^4.23.0"
semantic-kernel = {version = "1.3.0", python = "<3.13"}
azure-ai-ml = "^1.21.1"
azure-cosmos = "^4.7.0"
+asyncpg = "^0.30.0"
+psycopg2-binary = "^2.9.10"
+pgvector = "^0.3.6"
[tool.poetry.group.dev.dependencies]
pytest = "^8.3.3"
diff --git a/scripts/data_scripts/create_postgres_tables.py b/scripts/data_scripts/create_postgres_tables.py
new file mode 100644
index 000000000..605d7634c
--- /dev/null
+++ b/scripts/data_scripts/create_postgres_tables.py
@@ -0,0 +1,144 @@
+import json
+from azure.keyvault.secrets import SecretClient
+from azure.identity import DefaultAzureCredential
+import psycopg2
+from psycopg2 import sql
+
+key_vault_name = "kv_to-be-replaced"  # placeholder, replaced by sed in run_create_table_script.sh; NOTE(review): never used — SecretClient is imported but never instantiated
+principal_name = "webAppPrincipalName"  # placeholder, replaced by sed with the web app's principal name
+admin_principal_name = "adminAppPrincipalName"  # placeholder, replaced by sed with the admin app's principal name
+function_app_principal_name = "functionAppPrincipalName"  # placeholder, replaced by sed with the function app's principal name
+user = "managedIdentityName"  # placeholder, replaced by sed; used as the AAD login name in the connection string
+host = "serverName"  # placeholder, replaced by sed with the PostgreSQL flexible server host
+dbname = "postgres"  # fixed database name (matches the default database)
+
+
+def grant_permissions(cursor, dbname, schema_name, principal_name):
+    """
+    Registers principal_name as an AAD principal on the server, then grants it
+    CONNECT on dbname and DML rights on the tables currently in schema_name.
+
+    - cursor: psycopg2 cursor for the target server (already connected).
+    - dbname: database to grant CONNECT on.
+    - schema_name: schema whose existing tables receive SELECT/INSERT/UPDATE/DELETE.
+    - principal_name: name of the principal (role or user) to create and grant.
+    """
+
+    add_principal_user_query = sql.SQL("SELECT * FROM pgaadauth_create_principal({principal}, false, false)")  # NOTE(review): presumably fails if the principal already exists — confirm rerun behavior
+    cursor.execute(
+        add_principal_user_query.format(
+            principal=sql.Literal(principal_name),  # function argument, so injected as a string literal
+        )
+    )
+
+    # Grant CONNECT on database (names injected as quoted identifiers)
+    grant_connect_query = sql.SQL("GRANT CONNECT ON DATABASE {database} TO {principal}")
+    cursor.execute(
+        grant_connect_query.format(
+            database=sql.Identifier(dbname),
+            principal=sql.Identifier(principal_name),
+        )
+    )
+    print(f"Granted CONNECT on database '{dbname}' to '{principal_name}'")
+
+    # Grant SELECT, INSERT, UPDATE, DELETE on schema tables — NOTE(review): existing tables only; confirm future tables are handled
+    grant_permissions_query = sql.SQL(
+        "GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA {schema} TO {principal}"
+    )
+    cursor.execute(
+        grant_permissions_query.format(
+            schema=sql.Identifier(schema_name),
+            principal=sql.Identifier(principal_name),
+        )
+    )
+
+
+# Acquire an AAD access token for Azure Database for PostgreSQL
+cred = DefaultAzureCredential()
+access_token = cred.get_token("https://ossrdbms-aad.database.windows.net/.default")  # token is used as the password below
+
+# Build the connection string; the AAD token stands in for the password.
+conn_string = "host={0} user={1} dbname={2} password={3} sslmode=require".format(
+    host, user, dbname, access_token.token
+)
+conn = psycopg2.connect(conn_string)
+cursor = conn.cursor()
+
+# Drop and recreate the conversations table (destructive: existing rows are lost)
+cursor.execute("DROP TABLE IF EXISTS conversations")
+conn.commit()
+
+create_cs_sql = """CREATE TABLE conversations (
+    id TEXT PRIMARY KEY,
+    conversation_id TEXT NOT NULL,
+    type TEXT NOT NULL,
+    "createdAt" TEXT,
+    "updatedAt" TEXT,
+    user_id TEXT NOT NULL,
+    title TEXT
+    );"""
+cursor.execute(create_cs_sql)
+conn.commit()
+
+# Drop and recreate the messages table (destructive: existing rows are lost)
+cursor.execute("DROP TABLE IF EXISTS messages")
+conn.commit()
+
+create_ms_sql = """CREATE TABLE messages (
+    id TEXT PRIMARY KEY,
+    type VARCHAR(50) NOT NULL,
+    "createdAt" TEXT,
+    "updatedAt" TEXT,
+    user_id TEXT NOT NULL,
+    conversation_id TEXT NOT NULL,
+    role VARCHAR(50),
+    content TEXT NOT NULL,
+    feedback TEXT
+    );"""
+cursor.execute(create_ms_sql)
+conn.commit()
+
+
+# Enable the pgvector extension needed for the content_vector column below
+cursor.execute("CREATE EXTENSION IF NOT EXISTS vector CASCADE;")
+conn.commit()
+
+cursor.execute("DROP TABLE IF EXISTS vector_store;")  # destructive: existing embeddings are lost
+conn.commit()
+
+table_create_command = """CREATE TABLE IF NOT EXISTS vector_store(
+    id text,
+    title text,
+    chunk integer,
+    chunk_id text,
+    "offset" integer,
+    page_number integer,
+    content text,
+    source text,
+    metadata text,
+    content_vector public.vector(1536)
+);"""
+
+cursor.execute(table_create_command)
+conn.commit()
+
+
+cursor.execute("CREATE INDEX vector_store_content_vector_idx ON vector_store USING hnsw (content_vector vector_cosine_ops);")  # HNSW ANN index with cosine-distance operators
+conn.commit()
+
+grant_permissions(cursor, dbname, "public", principal_name)  # web app identity
+conn.commit()
+
+grant_permissions(cursor, dbname, "public", admin_principal_name)  # admin app identity
+conn.commit()
+
+grant_permissions(cursor, dbname, "public", function_app_principal_name)  # function app identity
+conn.commit()
+
+cursor.execute("ALTER TABLE public.conversations OWNER TO azure_pg_admin;")  # transfer ownership to azure_pg_admin
+cursor.execute("ALTER TABLE public.messages OWNER TO azure_pg_admin;")
+cursor.execute("ALTER TABLE public.vector_store OWNER TO azure_pg_admin;")
+conn.commit()
+
+cursor.close()
+conn.close()
diff --git a/scripts/data_scripts/requirements.txt b/scripts/data_scripts/requirements.txt
new file mode 100644
index 000000000..3cb4d1b3e
--- /dev/null
+++ b/scripts/data_scripts/requirements.txt
@@ -0,0 +1,3 @@
+psycopg2-binary==2.9.10
+azure-identity==1.19.0
+azure-keyvault-secrets==4.9.0
diff --git a/scripts/run_create_table_script.sh b/scripts/run_create_table_script.sh
new file mode 100644
index 000000000..8777ecbc5
--- /dev/null
+++ b/scripts/run_create_table_script.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+echo "started the script"
+
+# Variables
+baseUrl="$1"
+keyvaultName="$2"
+requirementFile="requirements.txt"
+requirementFileUrl=${baseUrl}"scripts/data_scripts/requirements.txt"
+resourceGroup="$3"
+serverName="$4"
+webAppPrincipalName="$5"
+adminAppPrincipalName="$6"
+functionAppPrincipalName="$7"
+managedIdentityName="$8"
+
+echo "Script Started"
+
+# Get the public IP address of the machine running the script
+publicIp=$(curl -s https://api.ipify.org)
+
+# Use Azure CLI to add the public IP to the PostgreSQL firewall rule
+az postgres flexible-server firewall-rule create --resource-group $resourceGroup --name $serverName --rule-name "AllowScriptIp" --start-ip-address "$publicIp" --end-ip-address "$publicIp"
+
+# Download the create table python file
+curl --output "create_postgres_tables.py" ${baseUrl}"scripts/data_scripts/create_postgres_tables.py"
+
+# Download the requirement file
+curl --output "$requirementFile" "$requirementFileUrl"
+
+echo "Download completed"
+
+#Replace key vault name
+sed -i "s/kv_to-be-replaced/${keyvaultName}/g" "create_postgres_tables.py"
+sed -i "s/webAppPrincipalName/${webAppPrincipalName}/g" "create_postgres_tables.py"
+sed -i "s/adminAppPrincipalName/${adminAppPrincipalName}/g" "create_postgres_tables.py"
+sed -i "s/managedIdentityName/${managedIdentityName}/g" "create_postgres_tables.py"
+sed -i "s/functionAppPrincipalName/${functionAppPrincipalName}/g" "create_postgres_tables.py"
+sed -i "s/serverName/${serverName}/g" "create_postgres_tables.py"
+
+pip install -r requirements.txt
+
+python create_postgres_tables.py