Code.py
%pip install azure-ai-projects==2.0.0b2 openai==1.109.1 python-dotenv azure-identity
import os
from dotenv import load_dotenv
from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient

# Load FOUNDRY_PROJECT_ENDPOINT / MODEL_DEPLOYMENT_NAME from a local .env file.
load_dotenv()

foundry_project_endpoint = os.getenv("FOUNDRY_PROJECT_ENDPOINT")
model_deployment_name = os.getenv("MODEL_DEPLOYMENT_NAME")

# Fail fast with an actionable message: a missing/empty endpoint otherwise
# surfaces much later as an opaque httpx/httpcore APIConnectionError.
if not foundry_project_endpoint or not model_deployment_name:
    raise RuntimeError(
        "FOUNDRY_PROJECT_ENDPOINT and MODEL_DEPLOYMENT_NAME must be set "
        "in the environment or in a .env file next to this script."
    )

# DefaultAzureCredential tries environment credentials, managed identity,
# Azure CLI login, etc., in order. NOTE(review): run `az login` (or set the
# AZURE_* env vars) before executing, or the request will fail to connect.
project_client = AIProjectClient(
    endpoint=foundry_project_endpoint,
    credential=DefaultAzureCredential(),
)

# The project client hands back an OpenAI-compatible client that is already
# bound to the Foundry project's inference endpoint and auth.
openai_client = project_client.get_openai_client()

# Responses API call: `instructions` plays the role of the system prompt,
# `input` is the user message.
response = openai_client.responses.create(
    model=model_deployment_name,
    instructions="You are a helpful AI assistant.",
    input="Can you tell me about Microsoft Foundry?",
)

print(f"Response output: {response.output_text}")
And .env has:
FOUNDRY_PROJECT_ENDPOINT=https://demoprojectv1.services.ai.azure.com/api/projects/udemy-demo-proj
MODEL_DEPLOYMENT_NAME=gpt-4o
Error:
ConnectError Traceback (most recent call last)
File ~\AppData\Roaming\Python\Python313\site-packages\httpx\_transports\default.py:101, in map_httpcore_exceptions()
100 try:
--> 101 yield
102 except Exception as exc:
File ~\AppData\Roaming\Python\Python313\site-packages\httpx\_transports\default.py:250, in HTTPTransport.handle_request(self, request)
249 with map_httpcore_exceptions():
--> 250 resp = self._pool.handle_request(req)
252 assert isinstance(resp.stream, typing.Iterable)
File ~\AppData\Roaming\Python\Python313\site-packages\httpcore\_sync\connection_pool.py:256, in ConnectionPool.handle_request(self, request)
255 self._close_connections(closing)
--> 256 raise exc from None
258 # Return the response. Note that in this case we still have to manage
259 # the point at which the response is closed.
File ~\AppData\Roaming\Python\Python313\site-packages\httpcore\_sync\connection_pool.py:236, in ConnectionPool.handle_request(self, request)
234 try:
235 # Send the request on the assigned connection.
--> 236 response = connection.handle_request(
237 pool_request.request
238 )
239 except ConnectionNotAvailable:
...
(...) 1022 response.headers,
1023 )
1024 log.debug("request_id: %s", response.headers.get("x-request-id"))
APIConnectionError: Connection error.
Output is truncated. View as a scrollable element or open in a text editor. Adjust cell output settings...
Code.py
%pip install azure-ai-projects==2.0.0b2 openai==1.109.1 python-dotenv azure-identity
import os
from dotenv import load_dotenv
from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient

# Load FOUNDRY_PROJECT_ENDPOINT / MODEL_DEPLOYMENT_NAME from a local .env file.
load_dotenv()

foundry_project_endpoint = os.getenv("FOUNDRY_PROJECT_ENDPOINT")
model_deployment_name = os.getenv("MODEL_DEPLOYMENT_NAME")

# Fail fast with an actionable message: a missing/empty endpoint otherwise
# surfaces much later as an opaque httpx/httpcore APIConnectionError.
if not foundry_project_endpoint or not model_deployment_name:
    raise RuntimeError(
        "FOUNDRY_PROJECT_ENDPOINT and MODEL_DEPLOYMENT_NAME must be set "
        "in the environment or in a .env file next to this script."
    )

# DefaultAzureCredential tries environment credentials, managed identity,
# Azure CLI login, etc., in order. NOTE(review): run `az login` (or set the
# AZURE_* env vars) before executing, or the request will fail to connect.
project_client = AIProjectClient(
    endpoint=foundry_project_endpoint,
    credential=DefaultAzureCredential(),
)

# The project client hands back an OpenAI-compatible client that is already
# bound to the Foundry project's inference endpoint and auth.
openai_client = project_client.get_openai_client()

# Responses API call: `instructions` plays the role of the system prompt,
# `input` is the user message.
response = openai_client.responses.create(
    model=model_deployment_name,
    instructions="You are a helpful AI assistant.",
    input="Can you tell me about Microsoft Foundry?",
)

print(f"Response output: {response.output_text}")
And .env has:
FOUNDRY_PROJECT_ENDPOINT=https://demoprojectv1.services.ai.azure.com/api/projects/udemy-demo-proj
MODEL_DEPLOYMENT_NAME=gpt-4o
Error:
ConnectError Traceback (most recent call last)
File ~\AppData\Roaming\Python\Python313\site-packages\httpx\_transports\default.py:101, in map_httpcore_exceptions()
100 try:
--> 101 yield
102 except Exception as exc:
File ~\AppData\Roaming\Python\Python313\site-packages\httpx\_transports\default.py:250, in HTTPTransport.handle_request(self, request)
249 with map_httpcore_exceptions():
--> 250 resp = self._pool.handle_request(req)
252 assert isinstance(resp.stream, typing.Iterable)
File ~\AppData\Roaming\Python\Python313\site-packages\httpcore\_sync\connection_pool.py:256, in ConnectionPool.handle_request(self, request)
255 self._close_connections(closing)
--> 256 raise exc from None
258 # Return the response. Note that in this case we still have to manage
259 # the point at which the response is closed.
File ~\AppData\Roaming\Python\Python313\site-packages\httpcore\_sync\connection_pool.py:236, in ConnectionPool.handle_request(self, request)
234 try:
235 # Send the request on the assigned connection.
--> 236 response = connection.handle_request(
237 pool_request.request
238 )
239 except ConnectionNotAvailable:
...
(...) 1022 response.headers,
1023 )
1024 log.debug("request_id: %s", response.headers.get("x-request-id"))
APIConnectionError: Connection error.
Output is truncated. View as a scrollable element or open in a text editor. Adjust cell output settings...