From 028565ce73d3933d2d94de95d766603937d2b676 Mon Sep 17 00:00:00 2001 From: Valentina-Alto Date: Thu, 9 Jan 2025 16:07:01 +0400 Subject: [PATCH 1/2] adding files --- .../CodeInterpreter/Dockerfile | 20 +++++ AICustomIntegrations/CodeInterpreter/app.py | 80 +++++++++++++++++++ AICustomIntegrations/CodeInterpreter/data.csv | 11 +++ .../CodeInterpreter/requirements.txt | 7 ++ AICustomIntegrations/README.md | 3 + 5 files changed, 121 insertions(+) create mode 100644 AICustomIntegrations/CodeInterpreter/Dockerfile create mode 100644 AICustomIntegrations/CodeInterpreter/app.py create mode 100644 AICustomIntegrations/CodeInterpreter/data.csv create mode 100644 AICustomIntegrations/CodeInterpreter/requirements.txt create mode 100644 AICustomIntegrations/README.md diff --git a/AICustomIntegrations/CodeInterpreter/Dockerfile b/AICustomIntegrations/CodeInterpreter/Dockerfile new file mode 100644 index 0000000..2865b04 --- /dev/null +++ b/AICustomIntegrations/CodeInterpreter/Dockerfile @@ -0,0 +1,20 @@ +# Use an official Python runtime as a parent image +FROM python:3.9-slim + +# Set the working directory in the container +WORKDIR /app + +# Copy the current directory contents into the container at /app +COPY . 
/app + +# Install any needed packages specified in requirements.txt +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the data.csv file into the container +COPY data.csv /app/data.csv + +# Make port 5000 available to the world outside this container +EXPOSE 5000 + +# Run app.py when the container launches +CMD ["python", "-m", "flask", "run", "-h", "0.0.0.0", "-p", "5000"] \ No newline at end of file diff --git a/AICustomIntegrations/CodeInterpreter/app.py b/AICustomIntegrations/CodeInterpreter/app.py new file mode 100644 index 0000000..ca6779a --- /dev/null +++ b/AICustomIntegrations/CodeInterpreter/app.py @@ -0,0 +1,80 @@ +from flask import Flask, request, jsonify +import os +from openai import AzureOpenAI + +app = Flask(__name__) + +client = AzureOpenAI( + api_key="xxx", + api_version="xxx", + azure_endpoint="xxx" +) + +@app.route('/chat', methods=['POST']) +def chat(): + user_content = request.json.get('content') + if not user_content: + return jsonify({'error': 'No content provided.'}), 400 + + response = client.chat.completions.create( + model="gpt-4o", + messages=[ + {"role": "system", "content": "Assistant is a large language model trained by OpenAI."}, + {"role": "user", "content": user_content} + ] + ) + + return jsonify({'response': response.choices[0].message.content}) + +import time + +@app.route('/code', methods=['POST']) +def code(): + user_content = request.json.get('content') + file = client.files.create(file=open("data.csv", "rb"), purpose="assistants") + + assistant = client.beta.assistants.create( + instructions="You are an AI assistant that can write code to help answer math questions.", + model="gpt-4o", + tools=[{"type": "code_interpreter"}], + tool_resources={"code_interpreter": {"file_ids": [file.id]}} + ) + + # Create a new thread + thread = client.beta.threads.create() + + # Place your first message into your thread + client.beta.threads.messages.create( + thread_id=thread.id, + role="user", + content=user_content, + ) + + # 
Create a run + run = client.beta.threads.runs.create( + thread_id=thread.id, + assistant_id=assistant.id, + ) + + # Wait for your run to finish + while True: + run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id) + + if run.status == "completed": + messages = client.beta.threads.messages.list(thread_id=thread.id) + messages_list = [] + for message in messages: + messages_list.append(message) + last_message = messages_list[0] + + client.beta.assistants.delete(assistant.id) + client.beta.threads.delete(thread.id) + break + elif run.status == "requires_action": + pass + elif run.status in ["expired", "failed", "cancelled"]: + break + return jsonify({'response': last_message.content[0].text.value}) + +if __name__ == '__main__': + app.run(debug=True) \ No newline at end of file diff --git a/AICustomIntegrations/CodeInterpreter/data.csv b/AICustomIntegrations/CodeInterpreter/data.csv new file mode 100644 index 0000000..4075806 --- /dev/null +++ b/AICustomIntegrations/CodeInterpreter/data.csv @@ -0,0 +1,11 @@ +Name,Type,City,Country,Rating +Le Gourmet,Restaurant,New York,USA,4.5 +Ocean's Delight,Restaurant,Sydney,Australia,4.7 +The Cozy Inn,Hotel,Zurich,Switzerland,4.8 +Mountain Retreat,Hotel,Aspen,USA,4.6 +City Lights Hotel,Hotel,Dubai,UAE,4.3 +Bella Roma,Restaurant,Rome,Italy,4.9 +Golden Dragon,Restaurant,Beijing,China,4.6 +Sea Breeze Resort,Hotel,Maldives,Maldives,4.7 +Café Paris,Restaurant,Paris,France,4.8 +The Royal Stay,Hotel,London,UK,4.4 diff --git a/AICustomIntegrations/CodeInterpreter/requirements.txt b/AICustomIntegrations/CodeInterpreter/requirements.txt new file mode 100644 index 0000000..b2e1ff8 --- /dev/null +++ b/AICustomIntegrations/CodeInterpreter/requirements.txt @@ -0,0 +1,7 @@ +# Do not include azure-functions-worker in this file +# The Python Worker is managed by the Azure Functions platform +# Manually managing azure-functions-worker may cause unexpected issues + +azure-functions +flask +openai \ No newline at end of file diff 
--git a/AICustomIntegrations/README.md b/AICustomIntegrations/README.md new file mode 100644 index 0000000..017163e --- /dev/null +++ b/AICustomIntegrations/README.md @@ -0,0 +1,3 @@ +# AI Custom Integration + +This folder collects samples of custom AI integrations. The idea is that you can call your custom code via HTTPS within a Copilot Studio Action. By doing so, you can leverage the full flexibility of invoking Azure components in AI Foundry or other AI Services, as well as leveraging AI orchestrators like Semantic Kernel and LangChain. \ No newline at end of file From b4b052080a0ab5b87cd103af26564f2fbb2d10f0 Mon Sep 17 00:00:00 2001 From: Valentina-Alto Date: Thu, 9 Jan 2025 18:59:10 +0400 Subject: [PATCH 2/2] readme --- .../CodeInterpreter/README.md | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 AICustomIntegrations/CodeInterpreter/README.md diff --git a/AICustomIntegrations/CodeInterpreter/README.md b/AICustomIntegrations/CodeInterpreter/README.md new file mode 100644 index 0000000..d030945 --- /dev/null +++ b/AICustomIntegrations/CodeInterpreter/README.md @@ -0,0 +1,25 @@ +# Integrate Copilot Studio with Azure AI Foundry (Code Interpreter and Chat API) + +This solution showcases the potential of integrating Azure OpenAI Assistants API (code interpreter) and Chat API within Copilot Studio with custom Python code. +The code is containerized with Docker and deployed on an Azure Container App. + +To invoke the endpoint, you can incorporate an HTTP call into your custom flow within a Copilot Studio Action and route it either to /chat (for the chat API) or to /code (for the code interpreter API). + +## Prerequisites +1. Azure subscription +2. Azure OpenAI instance with a GPT-4o deployment +3. Azure CLI + +## How to deploy + +1. Clone the repository +2. Add your Azure OpenAI API key, API version, and endpoint to `app.py` +3. Build the Docker image +`docker build -t yourapp .` +4. 
Log in to your Azure Container Registry +`az login` +`az acr login --name yourContainerRegistry` +5. Tag and push the docker image to your registry +`docker tag yourapp mycontainerregistry.azurecr.io/yourapp:v1` +`docker push mycontainerregistry.azurecr.io/yourapp:v1` +6. Create your Azure Container App (ACA) from the Azure Portal and link it to the image you pushed in the previous step. To create your ACA, you can follow [this tutorial](https://learn.microsoft.com/en-us/azure/container-apps/quickstart-portal) \ No newline at end of file