Skip to content

Commit

Permalink
style: Investigate pylint for PEP8
Browse files Browse the repository at this point in the history
  • Loading branch information
ross-p-smith committed May 15, 2024
1 parent 8c46627 commit 70fe71d
Show file tree
Hide file tree
Showing 5 changed files with 109 additions and 50 deletions.
1 change: 1 addition & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"ms-python.python",
"ms-python.black-formatter",
"ms-python.vscode-pylance",
"ms-python.pylint",
"ms-toolsai.jupyter",
"ms-vscode.vscode-node-azure-pack",
"TeamsDevApp.ms-teams-vscode-extension",
Expand Down
8 changes: 7 additions & 1 deletion code/app.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,24 @@
"""
This module contains the entry point for the application.
"""

import os
import logging
from azure.monitor.opentelemetry import configure_azure_monitor
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor

logging.captureWarnings(True)
logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO").upper())
# Raising the azure log level to WARN as it is too verbose - https://github.com/Azure/azure-sdk-for-python/issues/9422
# Raising the azure log level to WARN as it is too verbose -
# https://github.com/Azure/azure-sdk-for-python/issues/9422
logging.getLogger("azure").setLevel(os.environ.get("LOGLEVEL_AZURE", "WARN").upper())
# We cannot use EnvHelper here as Application Insights should be configured first
# for instrumentation to work correctly
if os.getenv("APPLICATIONINSIGHTS_ENABLED", "false").lower() == "true":
configure_azure_monitor()
HTTPXClientInstrumentor().instrument() # httpx is used by openai

# pylint: disable=wrong-import-position
from create_app import create_app # noqa: E402

app = create_app()
Expand Down
13 changes: 9 additions & 4 deletions code/backend/Admin.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,19 @@
import streamlit as st
"""
This module contains the code for the Admin app of the Chat with your data Solution Accelerator.
"""

import os
import logging
import sys
import streamlit as st
from azure.monitor.opentelemetry import configure_azure_monitor

sys.path.append(os.path.join(os.path.dirname(__file__), ".."))

logging.captureWarnings(True)
logging.basicConfig(level=os.getenv("LOGLEVEL", "INFO").upper())
# Raising the azure log level to WARN as it is too verbose - https://github.com/Azure/azure-sdk-for-python/issues/9422
# Raising the azure log level to WARN as it is too verbose
# https://github.com/Azure/azure-sdk-for-python/issues/9422
logging.getLogger("azure").setLevel(os.environ.get("LOGLEVEL_AZURE", "WARN").upper())
# We cannot use EnvHelper here as Application Insights needs to be configured first
# for instrumentation to work correctly
Expand All @@ -26,14 +31,14 @@
menu_items=None,
)

mod_page_style = """
MOD_PAGE_STYLE = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
header {visibility: hidden;}
</style>
"""
st.markdown(mod_page_style, unsafe_allow_html=True)
st.markdown(MOD_PAGE_STYLE, unsafe_allow_html=True)


col1, col2, col3 = st.columns([1, 2, 1])
Expand Down
79 changes: 48 additions & 31 deletions code/create_app.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
"""
This module creates a Flask app that serves the web interface for the chatbot.
"""

import functools
import json
import logging
import mimetypes
from os import path
import sys
import requests
from openai import AzureOpenAI, Stream
from openai.types.chat import ChatCompletionChunk
import mimetypes
from flask import Flask, Response, request, Request, jsonify
from dotenv import load_dotenv
import sys
import functools
from backend.batch.utilities.helpers.env_helper import EnvHelper
from backend.batch.utilities.helpers.orchestrator_helper import Orchestrator
from backend.batch.utilities.helpers.config.config_helper import ConfigHelper
Expand All @@ -19,6 +23,7 @@


def stream_with_data(response: Stream[ChatCompletionChunk]):
"""This function streams the response from Azure OpenAI with data."""
response_obj = {
"id": "",
"model": "",
Expand Down Expand Up @@ -69,7 +74,8 @@ def stream_with_data(response: Stream[ChatCompletionChunk]):
yield json.dumps(response_obj, ensure_ascii=False) + "\n"


def conversation_with_data(request: Request, env_helper: EnvHelper):
def conversation_with_data(conversation: Request, env_helper: EnvHelper):
"""This function streams the response from Azure OpenAI with data."""
if env_helper.is_auth_type_keys():
openai_client = AzureOpenAI(
azure_endpoint=env_helper.AZURE_OPENAI_ENDPOINT,
Expand All @@ -83,9 +89,10 @@ def conversation_with_data(request: Request, env_helper: EnvHelper):
azure_ad_token_provider=env_helper.AZURE_TOKEN_PROVIDER,
)

messages = request.json["messages"]
messages = conversation.json["messages"]

# Azure OpenAI takes the deployment name as the model name, "AZURE_OPENAI_MODEL" means deployment name.
# Azure OpenAI takes the deployment name as the model name, "AZURE_OPENAI_MODEL" means
# deployment name.
response = openai_client.chat.completions.create(
model=env_helper.AZURE_OPENAI_MODEL,
messages=messages,
Expand Down Expand Up @@ -180,45 +187,51 @@ def conversation_with_data(request: Request, env_helper: EnvHelper):
}

return response_obj
else:
return Response(
stream_with_data(response),
mimetype="application/json-lines",
)

return Response(
stream_with_data(response),
mimetype="application/json-lines",
)


def stream_without_data(response: Stream[ChatCompletionChunk]):
    """Stream an Azure OpenAI chat completion (no data source) as JSON lines.

    Yields one JSON object per received delta; each object carries the full
    assistant message accumulated so far. Iteration stops as soon as a chunk
    arrives whose delta content is None.

    :param response: the streaming chat-completion response to consume.
    """
    response_text = ""
    for line in response:
        # Skip chunks that carry no choices at all.
        if not line.choices:
            continue

        delta_text = line.choices[0].delta.content

        # A None delta marks the end of the stream.
        if delta_text is None:
            return

        response_text += delta_text

        response_obj = {
            "id": line.id,
            "model": line.model,
            "created": line.created,
            "object": line.object,
            "choices": [
                {"messages": [{"role": "assistant", "content": response_text}]}
            ],
        }
        yield json.dumps(response_obj, ensure_ascii=False) + "\n"


def get_message_orchestrator():
    """Build and return a new message :class:`Orchestrator` instance."""
    message_orchestrator = Orchestrator()
    return message_orchestrator


def get_orchestrator_config():
    """Return the orchestrator section of the active (or default) config."""
    active_config = ConfigHelper.get_active_config_or_default()
    return active_config.orchestrator


def conversation_without_data(request: Request, env_helper: EnvHelper):
def conversation_without_data(conversation: Request, env_helper: EnvHelper):
"""This function streams the response from Azure OpenAI without data."""
if env_helper.AZURE_AUTH_TYPE == "rbac":
openai_client = AzureOpenAI(
azure_endpoint=env_helper.AZURE_OPENAI_ENDPOINT,
Expand All @@ -232,13 +245,14 @@ def conversation_without_data(request: Request, env_helper: EnvHelper):
api_key=env_helper.AZURE_OPENAI_API_KEY,
)

request_messages = request.json["messages"]
request_messages = conversation.json["messages"]
messages = [{"role": "system", "content": env_helper.AZURE_OPENAI_SYSTEM_MESSAGE}]

for message in request_messages:
messages.append({"role": message["role"], "content": message["content"]})

# Azure Open AI takes the deployment name as the model name, "AZURE_OPENAI_MODEL" means deployment name.
# Azure Open AI takes the deployment name as the model name, "AZURE_OPENAI_MODEL" means
# deployment name.
response = openai_client.chat.completions.create(
model=env_helper.AZURE_OPENAI_MODEL,
messages=messages,
Expand Down Expand Up @@ -271,10 +285,8 @@ def conversation_without_data(request: Request, env_helper: EnvHelper):
],
}
return jsonify(response_obj), 200
else:
return Response(
stream_without_data(response), mimetype="application/json-lines"
)

return Response(stream_without_data(response), mimetype="application/json-lines")


@functools.cache
Expand All @@ -296,6 +308,7 @@ def get_speech_key(env_helper: EnvHelper):


def create_app():
"""This function creates the Flask app."""
# Fixing MIME types for static files under Windows
mimetypes.add_type("application/javascript", ".js")
mimetypes.add_type("text/css", ".css")
Expand All @@ -313,8 +326,8 @@ def create_app():

@app.route("/", defaults={"path": "index.html"})
@app.route("/<path:path>")
def static_file(path):
return app.send_static_file(path)
def static_file(file_path):
return app.send_static_file(file_path)

@app.route("/api/health", methods=["GET"])
def health():
Expand All @@ -328,9 +341,9 @@ def conversation_azure_byod():
else:
return conversation_without_data(request, env_helper)
except Exception as e:
errorMessage = str(e)
error_message = str(e)
logger.exception(
f"Exception in /api/conversation/azure_byod | {errorMessage}"
"Exception in /api/conversation/azure_byod | %s", error_message
)
return (
jsonify(
Expand Down Expand Up @@ -373,8 +386,10 @@ async def conversation_custom():
return jsonify(response_obj), 200

except Exception as e:
errorMessage = str(e)
logger.exception(f"Exception in /api/conversation/custom | {errorMessage}")
error_message = str(e)
logger.exception(
"Exception in /api/conversation/custom | %s", error_message
)
return (
jsonify(
{
Expand All @@ -386,6 +401,7 @@ async def conversation_custom():

@app.route("/api/speech", methods=["GET"])
def speech_config():
"""Get the speech config for Azure Speech."""
try:
speech_key = env_helper.AZURE_SPEECH_KEY or get_speech_key(env_helper)

Expand All @@ -394,6 +410,7 @@ def speech_config():
headers={
"Ocp-Apim-Subscription-Key": speech_key,
},
timeout=5,
)

if response.status_code == 200:
Expand All @@ -403,10 +420,10 @@ def speech_config():
"languages": env_helper.AZURE_SPEECH_RECOGNIZER_LANGUAGES,
}

logger.error(f"Failed to get speech config: {response.text}")
logger.error("Failed to get speech config: %s", response.text)
return {"error": "Failed to get speech config"}, response.status_code
except Exception as e:
logger.exception(f"Exception in /api/speech | {str(e)}")
logger.exception("Exception in /api/speech | %s", str(e))

return {"error": "Failed to get speech config"}, 500

Expand Down
Loading

0 comments on commit 70fe71d

Please sign in to comment.