From bcabb271d144b00c90652d071b68168e3c5fa666 Mon Sep 17 00:00:00 2001 From: avichalsri24 Date: Mon, 11 May 2026 16:25:48 +0530 Subject: [PATCH 1/2] feat(platform): expand Data Fabric entities service [DS-8360] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds the missing Data Fabric methods on EntitiesService and exposes the full backend parameter set on existing batch methods. Internally splits the service into a facade pattern for clearer separation between schema- and data-side operations while keeping the public sdk.entities.* surface unchanged: - EntitySchemaService (internal): entity / choiceset CRUD against datafabric_/api/Entity. - EntityDataService (internal): record CRUD (single + batch), structured query, federated SQL query, attachments, bulk import, choiceset values, against datafabric_/api/EntityService/... + datafabric_/api/Attachment + datafabric_/api/v1/query/execute. - EntitiesService (public): thin facade that delegates to the two sub-services and owns cross-cutting concerns (agent entity-set resolution). New methods (sync + async on the facade): - Single-record ops: insert_record, get_record, update_record, delete_record — fire trigger events on each mutation. - query — structured query with filter_group, sort_options, selected_fields, expansions, expansion_level, aggregates, group_by, joins, binnings, start, limit. Routes to V2 endpoint when binnings is supplied. - Attachments: upload_attachment (bytes or path), download_attachment, delete_attachment. - Schema: create_entity (with SQL-type mapping and per-type constraint defaults), delete_entity, update_entity_metadata. - import_records for CSV bulk upload. Existing methods extended (ticket §B): - insert_records / update_records / delete_records accept expansion_level and fail_on_first. 
- list_records accepts OData filter / orderby / select / expand / expansion_level and returns EntityRecordsListResponse (a list subclass with total_count / has_next_page / next_cursor). Bug fixes (ticket §C): - Batch operations recover per-record failures from HTTP 400 responses that carry successRecords / failureRecords lists; other non-2xx statuses propagate. - Record input is normalized — accepts dicts, Pydantic models, EntityRecord, or any object with __dict__. Client-side validation in create_entity uses the UI's create-form rules: ^[a-zA-Z][a-zA-Z0-9]*$ (no underscores), entity 1-30 chars, field 3-100 chars, plus reserved-name and per-field constraint range checks. Anything accepted by the SDK round-trips cleanly through the Data Service UI. Backward compatibility: public method signatures only gained optional kwargs. EntityRecord.id stays required. list_records return type subclasses list, so iteration / indexing / len() / isinstance(records, list) continue to work. Existing method docstrings preserved verbatim from main; only ticket-mandated additions appear in their docs. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/uipath-platform/pyproject.toml | 2 +- .../src/uipath/platform/entities/__init__.py | 42 +- .../platform/entities/_entities_service.py | 1947 ++++++++++++----- .../platform/entities/_entity_data_service.py | 1401 ++++++++++++ .../entities/_entity_schema_service.py | 499 +++++ .../src/uipath/platform/entities/entities.py | 386 +++- .../tests/services/test_entities_service.py | 1152 +++++++++- packages/uipath-platform/uv.lock | 2 +- packages/uipath/uv.lock | 2 +- 9 files changed, 4788 insertions(+), 645 deletions(-) create mode 100644 packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py create mode 100644 packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py diff --git a/packages/uipath-platform/pyproject.toml b/packages/uipath-platform/pyproject.toml index 1220b386a..f54e0d143 100644 --- a/packages/uipath-platform/pyproject.toml +++ b/packages/uipath-platform/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uipath-platform" -version = "0.1.48" +version = "0.1.49" description = "HTTP client library for programmatic access to UiPath Platform" readme = { file = "README.md", content-type = "text/markdown" } requires-python = ">=3.11" diff --git a/packages/uipath-platform/src/uipath/platform/entities/__init__.py b/packages/uipath-platform/src/uipath/platform/entities/__init__.py index aca80997b..63069d1d6 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/__init__.py +++ b/packages/uipath-platform/src/uipath/platform/entities/__init__.py @@ -8,17 +8,34 @@ ChoiceSetValue, DataFabricEntityItem, Entity, + EntityAggregate, + EntityAggregateFunction, + EntityBinning, + EntityCreateFieldOptions, + EntityCreateOptions, EntityField, + EntityFieldDataType, EntityFieldMetadata, + EntityImportRecordsResponse, + EntityJoin, + EntityMetadataUpdateOptions, + EntityQueryFilter, + EntityQueryFilterGroup, + EntityQueryRecordsResponse, + EntityQuerySortOption, 
EntityRecord, EntityRecordsBatchResponse, + EntityRecordsListResponse, EntityRouting, EntitySetResolution, ExternalField, ExternalObject, ExternalSourceFields, + FailureRecord, FieldDataType, FieldMetadata, + LogicalOperator, + QueryFilterOperator, QueryRoutingOverrideContext, ReferenceType, SourceJoinCriteria, @@ -29,17 +46,34 @@ "DataFabricEntityItem", "EntitiesService", "Entity", + "EntityAggregate", + "EntityAggregateFunction", + "EntityBinning", + "EntityCreateFieldOptions", + "EntityCreateOptions", "EntityField", - "EntityRecord", + "EntityFieldDataType", "EntityFieldMetadata", + "EntityImportRecordsResponse", + "EntityJoin", + "EntityMetadataUpdateOptions", + "EntityQueryFilter", + "EntityQueryFilterGroup", + "EntityQueryRecordsResponse", + "EntityQuerySortOption", + "EntityRecord", + "EntityRecordsBatchResponse", + "EntityRecordsListResponse", "EntityRouting", "EntitySetResolution", - "FieldDataType", - "FieldMetadata", - "EntityRecordsBatchResponse", "ExternalField", "ExternalObject", "ExternalSourceFields", + "FailureRecord", + "FieldDataType", + "FieldMetadata", + "LogicalOperator", + "QueryFilterOperator", "QueryRoutingOverrideContext", "ReferenceType", "SourceJoinCriteria", diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py index 951a4b07b..f8eb08c6a 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py +++ b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py @@ -1,22 +1,32 @@ -import json as json_module +"""Public facade for the Data Fabric entities surface. + +:class:`EntitiesService` keeps the existing ``sdk.entities.*`` API flat and +unchanged from a caller's perspective while delegating each operation to the +appropriate underlying service: + +* :class:`EntitySchemaService` — entity definitions, choice set listings, + create / delete / update-metadata lifecycle. 
+* :class:`EntityDataService` — record CRUD (single and batch), structured + queries, attachments, choice-set values, bulk import, and the legacy + federated SQL query. + +The facade additionally owns cross-cutting concerns such as agent entity-set +resolution. +""" + import logging -from typing import Any, Dict, List, Optional, Type +from typing import Any, Dict, List, Optional, Type, Union -import sqlparse from httpx import Response -from sqlparse.sql import Function, Identifier, IdentifierList, Parenthesis, Where -from sqlparse.tokens import DML, Keyword, Whitespace, Wildcard from uipath.core.tracing import traced from ..common._base_service import BaseService from ..common._bindings import _resource_overwrites from ..common._config import UiPathApiConfig from ..common._execution_context import UiPathExecutionContext -from ..common._models import Endpoint, RequestSpec -from ..common.constants import HEADER_FOLDER_KEY from ..orchestrator._folder_service import FolderService +from ._entity_data_service import EntityDataService, FileContent from ._entity_resolution import ( - RoutingStrategy, build_resolution_service, create_resolution_plan, create_resolution_plan_async, @@ -24,46 +34,45 @@ fetch_resolved_entities, fetch_resolved_entities_async, ) +from ._entity_schema_service import EntitySchemaService from .entities import ( ChoiceSetValue, DataFabricEntityItem, Entity, + EntityAggregate, + EntityBinning, + EntityCreateFieldOptions, + EntityCreateOptions, + EntityImportRecordsResponse, + EntityJoin, + EntityMetadataUpdateOptions, + EntityQueryFilterGroup, + EntityQueryRecordsResponse, + EntityQuerySortOption, EntityRecord, EntityRecordsBatchResponse, + EntityRecordsListResponse, EntitySetResolution, QueryRoutingOverrideContext, ) logger = logging.getLogger(__name__) -_FORBIDDEN_DML = {"INSERT", "UPDATE", "DELETE", "MERGE", "REPLACE"} -_FORBIDDEN_DDL = {"DROP", "ALTER", "CREATE", "TRUNCATE"} -_DISALLOWED_KEYWORDS = [ - "WITH", - "UNION", - "INTERSECT", - "EXCEPT", 
- "OVER", - "ROLLUP", - "CUBE", - "GROUPING", - "PARTITION", -] -_AGGREGATE_FUNCTIONS = ("COUNT", "SUM", "AVG", "MIN", "MAX") - class EntitiesService(BaseService): """Service for managing UiPath Data Service entities. - Entities are database tables in UiPath Data Service that can store - structured data for automation processes. + Entities are database tables in UiPath Data Service that store structured + data for automation processes. This service is the unified entry point for + every entity operation: schema management, record CRUD, structured and + SQL queries, file attachments, choice sets, and bulk import. See Also: https://docs.uipath.com/data-service/automation-cloud/latest/user-guide/introduction !!! warning "Preview Feature" - This function is currently experimental. - Behavior and parameters are subject to change in future versions. + This service is currently experimental. Behavior and parameters are + subject to change in future versions. """ def __init__( @@ -75,14 +84,30 @@ def __init__( entity_name_overrides: Optional[Dict[str, str]] = None, routing_context: Optional[QueryRoutingOverrideContext] = None, ) -> None: + """Initialise the facade and its underlying schema and data services.""" super().__init__(config=config, execution_context=execution_context) self._folders_service = folders_service - self._routing_strategy: RoutingStrategy = create_routing_strategy( + self._routing_strategy = create_routing_strategy( folders_map=folders_map, effective_entity_names=entity_name_overrides, routing_context=routing_context, folders_service=folders_service, ) + self._schema = EntitySchemaService( + config=config, + execution_context=execution_context, + folders_service=folders_service, + ) + self._data = EntityDataService( + config=config, + execution_context=execution_context, + folders_service=folders_service, + routing_strategy=self._routing_strategy, + ) + + # ------------------------------------------------------------------ + # Schema operations — 
delegate to EntitySchemaService + # ------------------------------------------------------------------ @traced(name="entity_retrieve", run_type="uipath") def retrieve(self, entity_key: str) -> Entity: @@ -117,10 +142,7 @@ def retrieve(self, entity_key: str) -> Entity: print(f" Required: {field.is_required}") print(f" Primary Key: {field.is_primary_key}") """ - spec = self._retrieve_spec(entity_key) - response = self.request(spec.method, spec.endpoint) - - return Entity.model_validate(response.json()) + return self._schema.retrieve(entity_key) @traced(name="entity_retrieve", run_type="uipath") async def retrieve_async(self, entity_key: str) -> Entity: @@ -155,11 +177,7 @@ async def retrieve_async(self, entity_key: str) -> Entity: print(f" Required: {field.is_required}") print(f" Primary Key: {field.is_primary_key}") """ - spec = self._retrieve_spec(entity_key) - - response = await self.request_async(spec.method, spec.endpoint) - - return Entity.model_validate(response.json()) + return await self._schema.retrieve_async(entity_key) @traced(name="entity_retrieve_by_name", run_type="uipath") def retrieve_by_name( @@ -175,11 +193,7 @@ def retrieve_by_name( entity_name: The name of the entity. folder_key: Optional folder key for disambiguation. """ - spec = self._retrieve_by_name_spec(entity_name) - headers = self._folder_key_headers(folder_key) - response = self.request(spec.method, spec.endpoint, headers=headers) - - return Entity.model_validate(response.json()) + return self._schema.retrieve_by_name(entity_name, folder_key=folder_key) @traced(name="entity_retrieve_by_name", run_type="uipath") async def retrieve_by_name_async( @@ -195,11 +209,9 @@ async def retrieve_by_name_async( entity_name: The name of the entity. folder_key: Optional folder key for disambiguation. 
""" - spec = self._retrieve_by_name_spec(entity_name) - headers = self._folder_key_headers(folder_key) - response = await self.request_async(spec.method, spec.endpoint, headers=headers) - - return Entity.model_validate(response.json()) + return await self._schema.retrieve_by_name_async( + entity_name, folder_key=folder_key + ) @traced(name="list_entities", run_type="uipath") def list_entities(self) -> List[Entity]: @@ -238,11 +250,7 @@ def list_entities(self) -> List[Entity]: print(f"Total records: {total_records}") print(f"Total storage: {total_storage:.2f} MB") """ - spec = self._list_entities_spec() - response = self.request(spec.method, spec.endpoint) - - entities_data = response.json() - return [Entity.model_validate(entity) for entity in entities_data] + return self._schema.list_entities() @traced(name="list_entities", run_type="uipath") async def list_entities_async(self) -> List[Entity]: @@ -281,11 +289,7 @@ async def list_entities_async(self) -> List[Entity]: print(f"Total records: {total_records}") print(f"Total storage: {total_storage:.2f} MB") """ - spec = self._list_entities_spec() - response = await self.request_async(spec.method, spec.endpoint) - - entities_data = response.json() - return [Entity.model_validate(entity) for entity in entities_data] + return await self._schema.list_entities_async() @traced(name="list_choicesets", run_type="uipath") def list_choicesets(self) -> List[Entity]: @@ -301,9 +305,7 @@ def list_choicesets(self) -> List[Entity]: for cs in choicesets: print(f"{cs.display_name} ({cs.id})") """ - spec = self._list_choicesets_spec() - response = self.request(spec.method, spec.endpoint) - return [Entity.model_validate(item) for item in response.json()] + return self._schema.list_choicesets() @traced(name="list_choicesets", run_type="uipath") async def list_choicesets_async(self) -> List[Entity]: @@ -312,16 +314,215 @@ async def list_choicesets_async(self) -> List[Entity]: Returns: List[Entity]: A list of all choice set entities. 
""" - spec = self._list_choicesets_spec() - response = await self.request_async(spec.method, spec.endpoint) - return [Entity.model_validate(item) for item in response.json()] + return await self._schema.list_choicesets_async() + + @traced(name="entity_create", run_type="uipath") + def create_entity( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Create a new entity with the given schema and return its id. + + Args: + name (str): Entity name. Must start with a letter and contain + only letters, digits, and underscores (3-100 characters). + fields (List[EntityCreateFieldOptions]): Field definitions for + the new entity. Each entry declares the field's name, type, + and optional constraints such as ``length_limit``, + ``decimal_precision``, ``is_required``, ``is_unique``, etc. + options (Optional[EntityCreateOptions]): Optional entity-level + settings such as display name, description, folder + placement, and RBAC / analytics flags. + + Returns: + str: The id (UUID) of the newly created entity. + + Raises: + ValueError: If the entity name or any field name fails the + client-side validation (regex / length / reserved names) or + if a per-field constraint is not supported for that field + type or is out of range. 
+ + Examples: + Create a simple entity:: + + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityCreateOptions, + EntityFieldDataType, + ) + + entity_id = entities_service.create_entity( + "ProductCatalog", + [ + EntityCreateFieldOptions( + field_name="product_name", + type=EntityFieldDataType.STRING, + is_required=True, + is_unique=True, + ), + EntityCreateFieldOptions( + field_name="price", + type=EntityFieldDataType.DECIMAL, + decimal_precision=2, + ), + ], + options=EntityCreateOptions( + display_name="Product Catalog", + description="Inventory of available products", + is_rbac_enabled=True, + ), + ) + """ + return self._schema.create_entity(name, fields, options) + + @traced(name="entity_create", run_type="uipath") + async def create_entity_async( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Asynchronously create a new entity with the given schema. + + Args: + name (str): Entity name; same validation rules as :meth:`create_entity`. + fields (List[EntityCreateFieldOptions]): Field definitions. + options (Optional[EntityCreateOptions]): Optional entity-level settings. + + Returns: + str: The id (UUID) of the newly created entity. + + Raises: + ValueError: For client-side validation failures. + + Examples: + Create a simple entity:: + + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + entity_id = await entities_service.create_entity_async( + "ProductCatalog", + [ + EntityCreateFieldOptions( + field_name="product_name", + type=EntityFieldDataType.STRING, + is_required=True, + ), + ], + ) + """ + return await self._schema.create_entity_async(name, fields, options) + + @traced(name="entity_delete", run_type="uipath") + def delete_entity(self, entity_id: str) -> None: + """Delete an entity and all of its records. + + Args: + entity_id (str): The unique identifier of the entity to delete. 
+ + Examples: + Delete an entity by id:: + + entities_service.delete_entity("a1b2c3d4-...") + """ + self._schema.delete_entity(entity_id) + + @traced(name="entity_delete", run_type="uipath") + async def delete_entity_async(self, entity_id: str) -> None: + """Asynchronously delete an entity and all of its records. + + Args: + entity_id (str): The unique identifier of the entity to delete. + + Examples: + Delete an entity by id:: + + await entities_service.delete_entity_async("a1b2c3d4-...") + """ + await self._schema.delete_entity_async(entity_id) + + @traced(name="entity_update_metadata", run_type="uipath") + def update_entity_metadata( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Update an entity's display name, description, and/or RBAC flag. + + Args: + entity_id (str): The unique identifier of the entity. + metadata (Union[EntityMetadataUpdateOptions, Dict[str, Any]]): + An :class:`EntityMetadataUpdateOptions` instance or a dict + with any of ``display_name``, ``description``, + ``is_rbac_enabled``. Dict keys may be snake_case + (``display_name``) or camelCase (``displayName``); both + serialize correctly to the API. + + Examples: + Rename and update description:: + + from uipath.platform.entities import EntityMetadataUpdateOptions + + entities_service.update_entity_metadata( + "a1b2c3d4-...", + EntityMetadataUpdateOptions( + display_name="New Display Name", + description="Refreshed description", + ), + ) + + From a plain dict:: + + entities_service.update_entity_metadata( + "a1b2c3d4-...", + {"display_name": "X", "is_rbac_enabled": True}, + ) + """ + self._schema.update_entity_metadata(entity_id, metadata) + + @traced(name="entity_update_metadata", run_type="uipath") + async def update_entity_metadata_async( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Asynchronously update an entity's display name, description, and/or RBAC flag. 
+ + Args: + entity_id (str): The unique identifier of the entity. + metadata (Union[EntityMetadataUpdateOptions, Dict[str, Any]]): + An :class:`EntityMetadataUpdateOptions` instance or a dict + with any of ``display_name``, ``description``, + ``is_rbac_enabled``. + + Examples: + Rename:: + + from uipath.platform.entities import EntityMetadataUpdateOptions + + await entities_service.update_entity_metadata_async( + "a1b2c3d4-...", + EntityMetadataUpdateOptions(display_name="Renamed Entity"), + ) + """ + await self._schema.update_entity_metadata_async(entity_id, metadata) + + # ------------------------------------------------------------------ + # Data operations — delegate to EntityDataService + # ------------------------------------------------------------------ @traced(name="get_choiceset_values", run_type="uipath") def get_choiceset_values( self, choiceset_id: str, - start: int | None = None, - limit: int | None = None, + start: Optional[int] = None, + limit: Optional[int] = None, ) -> List[ChoiceSetValue]: """Get the values of a choice set by its ID. @@ -341,23 +542,14 @@ def get_choiceset_values( for v in values: print(f"{v.number_id}: {v.display_name}") """ - spec = self._get_choiceset_values_spec(choiceset_id, start=start, limit=limit) - response = self.request( - spec.method, spec.endpoint, params=spec.params, json=spec.json - ) - data = response.json() - raw_values = data.get("jsonValue", "[]") - items = ( - json_module.loads(raw_values) if isinstance(raw_values, str) else raw_values - ) - return [ChoiceSetValue.model_validate(item) for item in items] + return self._data.get_choiceset_values(choiceset_id, start=start, limit=limit) @traced(name="get_choiceset_values", run_type="uipath") async def get_choiceset_values_async( self, choiceset_id: str, - start: int | None = None, - limit: int | None = None, + start: Optional[int] = None, + limit: Optional[int] = None, ) -> List[ChoiceSetValue]: """Asynchronously get the values of a choice set by its ID. 
@@ -369,25 +561,23 @@ async def get_choiceset_values_async( Returns: List[ChoiceSetValue]: The values in the choice set. """ - spec = self._get_choiceset_values_spec(choiceset_id, start=start, limit=limit) - response = await self.request_async( - spec.method, spec.endpoint, params=spec.params, json=spec.json + return await self._data.get_choiceset_values_async( + choiceset_id, start=start, limit=limit ) - data = response.json() - raw_values = data.get("jsonValue", "[]") - items = ( - json_module.loads(raw_values) if isinstance(raw_values, str) else raw_values - ) - return [ChoiceSetValue.model_validate(item) for item in items] @traced(name="entity_list_records", run_type="uipath") def list_records( self, entity_key: str, - schema: Optional[Type[Any]] = None, # Optional schema + schema: Optional[Type[Any]] = None, start: Optional[int] = None, limit: Optional[int] = None, - ) -> List[EntityRecord]: + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: """List records from an entity with optional pagination and schema validation. The schema parameter enables type-safe access to entity records by validating the @@ -424,11 +614,23 @@ class CustomerRecord: start (Optional[int]): Starting index for pagination (0-based). limit (Optional[int]): Maximum number of records to return. + expansion_level (Optional[int]): Depth of foreign-key expansion in the + response (``0`` means no expansion). Higher values inline related + records up to that many hops. + filter (Optional[str]): OData ``$filter`` expression + (e.g. ``"status eq 'active'"``). + orderby (Optional[str]): OData ``$orderby`` expression + (e.g. ``"created_at desc"``). + select (Optional[List[str]]): Column projection — field names to + include (rendered as ``$select``). 
+ expand (Optional[List[str]]): Relationship names to expand inline + (rendered as ``$expand``). Returns: - List[EntityRecord]: A list of entity records. Each record contains an 'id' field - and all other fields from the entity. Fields can be accessed as attributes - or dictionary keys on the EntityRecord object. + EntityRecordsListResponse: A list-compatible response with + ``total_count``, ``has_next_page`` and ``next_cursor`` pagination + metadata. Iteration, indexing, and ``len()`` continue to work + like a plain list of :class:`EntityRecord`. Raises: ValueError: If schema validation fails for any record, including cases where @@ -446,6 +648,22 @@ class CustomerRecord: # Get first 50 records records = entities_service.list_records("Customers", start=0, limit=50) + print(f"Showing {len(records)} of {records.total_count} total") + if records.has_next_page: + next_page = entities_service.list_records( + "Customers", start=50, limit=50 + ) + + With OData filter, sorting, projection, and expansion:: + + records = entities_service.list_records( + "Customers", + filter="status eq 'active'", + orderby="created_at desc", + select=["name", "email", "status"], + expand=["company"], + expansion_level=1, + ) With schema validation:: @@ -465,28 +683,31 @@ class CustomerRecord: for record in records: print(f"{record.name}: {record.email}") """ - # Example method to generate the API request specification (mocked here) - spec = self._list_records_spec(entity_key, start, limit) - - # Make the HTTP request (assumes self.request exists) - response = self.request(spec.method, spec.endpoint, params=spec.params) - - # Parse the response JSON and extract the "value" field - records_data = response.json().get("value", []) - - # Validate and wrap records - return [ - EntityRecord.from_data(data=record, model=schema) for record in records_data - ] + return self._data.list_records( + entity_key, + schema=schema, + start=start, + limit=limit, + expansion_level=expansion_level, + 
filter=filter, + orderby=orderby, + select=select, + expand=expand, + ) @traced(name="entity_list_records", run_type="uipath") async def list_records_async( self, entity_key: str, - schema: Optional[Type[Any]] = None, # Optional schema + schema: Optional[Type[Any]] = None, start: Optional[int] = None, limit: Optional[int] = None, - ) -> List[EntityRecord]: + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: """Asynchronously list records from an entity with optional pagination and schema validation. The schema parameter enables type-safe access to entity records by validating the @@ -523,11 +744,23 @@ class CustomerRecord: start (Optional[int]): Starting index for pagination (0-based). limit (Optional[int]): Maximum number of records to return. + expansion_level (Optional[int]): Depth of foreign-key expansion in the + response (``0`` means no expansion). Higher values inline related + records up to that many hops. + filter (Optional[str]): OData ``$filter`` expression + (e.g. ``"status eq 'active'"``). + orderby (Optional[str]): OData ``$orderby`` expression + (e.g. ``"created_at desc"``). + select (Optional[List[str]]): Column projection — field names to + include (rendered as ``$select``). + expand (Optional[List[str]]): Relationship names to expand inline + (rendered as ``$expand``). Returns: - List[EntityRecord]: A list of entity records. Each record contains an 'id' field - and all other fields from the entity. Fields can be accessed as attributes - or dictionary keys on the EntityRecord object. + EntityRecordsListResponse: A list-compatible response with + ``total_count``, ``has_next_page`` and ``next_cursor`` pagination + metadata. Iteration, indexing, and ``len()`` continue to work + like a plain list of :class:`EntityRecord`. 
Raises: ValueError: If schema validation fails for any record, including cases where @@ -545,6 +778,22 @@ class CustomerRecord: # Get first 50 records records = await entities_service.list_records_async("Customers", start=0, limit=50) + print(f"Showing {len(records)} of {records.total_count} total") + if records.has_next_page: + next_page = await entities_service.list_records_async( + "Customers", start=50, limit=50 + ) + + With OData filter, sorting, projection, and expansion:: + + records = await entities_service.list_records_async( + "Customers", + filter="status eq 'active'", + orderby="created_at desc", + select=["name", "email", "status"], + expand=["company"], + expansion_level=1, + ) With schema validation:: @@ -564,193 +813,330 @@ class CustomerRecord: for record in records: print(f"{record.name}: {record.email}") """ - spec = self._list_records_spec(entity_key, start, limit) - - # Make the HTTP request (assumes self.request exists) - response = await self.request_async( - spec.method, spec.endpoint, params=spec.params + return await self._data.list_records_async( + entity_key, + schema=schema, + start=start, + limit=limit, + expansion_level=expansion_level, + filter=filter, + orderby=orderby, + select=select, + expand=expand, ) - # Parse the response JSON and extract the "value" field - records_data = response.json().get("value", []) - - # Validate and wrap records - return [ - EntityRecord.from_data(data=record, model=schema) for record in records_data - ] - - @traced(name="entity_query_records", run_type="uipath") - def query_entity_records( + @traced(name="entity_insert_record", run_type="uipath") + def insert_record( self, - sql_query: str, - ) -> List[Dict[str, Any]]: - """Query entity records using a validated SQL query. + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Insert a single record into an entity and return the inserted row. 
- PREVIEW: This method is in preview and may change in future releases. + Note: + Unlike :meth:`insert_records` (batch), this single-record endpoint + fires Data Fabric trigger events. Use this method when triggers + attached to the entity must run. Args: - sql_query (str): A SQL SELECT query to execute against Data Service entities. - Only SELECT statements are allowed. Queries without WHERE must include - a LIMIT clause. Subqueries and multi-statement queries are not permitted. - - Notes: - A routing context is always derived from the configured ``folders_map`` - when present and included in the request body. + entity_key (str): The unique key/identifier of the entity. + data (Any): Record payload — a dict, a Pydantic model, an + :class:`EntityRecord`, or any object exposing ``__dict__``. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). Returns: - List[Dict[str, Any]]: A list of result records as dictionaries. + EntityRecord: The inserted record with its server-assigned ``Id`` + plus any expanded relationships. - Raises: - ValueError: If the SQL query fails validation (e.g., non-SELECT, missing - WHERE/LIMIT, forbidden keywords, subqueries). 
+ Examples: + Insert from a dict:: + + record = entities_service.insert_record( + "Customers", + {"name": "Alice", "email": "alice@example.com"}, + ) + print(record.id) + + Insert from a Pydantic model:: + + class CustomerInput(BaseModel): + name: str + email: str + + record = entities_service.insert_record( + "Customers", + CustomerInput(name="Bob", email="bob@example.com"), + expansion_level=1, + ) """ - return self._query_entities_for_records(sql_query) + return self._data.insert_record( + entity_key, data, expansion_level=expansion_level + ) - @traced(name="entity_query_records", run_type="uipath") - async def query_entity_records_async( + @traced(name="entity_insert_record", run_type="uipath") + async def insert_record_async( self, - sql_query: str, - ) -> List[Dict[str, Any]]: - """Asynchronously query entity records using a validated SQL query. + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Asynchronously insert a single record into an entity. - PREVIEW: This method is in preview and may change in future releases. + Note: + Unlike :meth:`insert_records_async` (batch), this single-record + endpoint fires Data Fabric trigger events. Use this method when + triggers attached to the entity must run. Args: - sql_query (str): A SQL SELECT query to execute against Data Service entities. - Only SELECT statements are allowed. Queries without WHERE must include - a LIMIT clause. Subqueries and multi-statement queries are not permitted. - - Notes: - A routing context is always derived from the configured ``folders_map`` - when present and included in the request body. + entity_key (str): The unique key/identifier of the entity. + data (Any): Record payload — a dict, a Pydantic model, an + :class:`EntityRecord`, or any object exposing ``__dict__``. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). 
Returns: - List[Dict[str, Any]]: A list of result records as dictionaries. + EntityRecord: The inserted record with its server-assigned ``Id``. - Raises: - ValueError: If the SQL query fails validation (e.g., non-SELECT, missing - WHERE/LIMIT, forbidden keywords, subqueries). - """ - return await self._query_entities_for_records_async(sql_query) + Examples: + Insert from a dict:: - @traced(name="resolve_entity_set", run_type="uipath") - def resolve_entity_set( - self, - items: list[DataFabricEntityItem], - ) -> EntitySetResolution: - """Resolve an agent entity set, applying resource overwrites.""" - plan = create_resolution_plan( - items, - _resource_overwrites.get() or {}, - lambda folder_path: ( - self._folders_service.retrieve_key(folder_path=folder_path) - if self._folders_service is not None - else None - ), - ) - entities = fetch_resolved_entities( - plan, - self.retrieve, - self.retrieve_by_name, - logger, - ) - resolution_service: EntitiesService = build_resolution_service( # type: ignore[assignment] - config=self._config, - execution_context=self._execution_context, - folders_service=self._folders_service, - plan=plan, - service_factory=EntitiesService, - ) - return EntitySetResolution( - entities=entities, - entities_service=resolution_service, + record = await entities_service.insert_record_async( + "Customers", + {"name": "Alice", "email": "alice@example.com"}, + ) + print(record.id) + """ + return await self._data.insert_record_async( + entity_key, data, expansion_level=expansion_level ) - @traced(name="resolve_entity_set", run_type="uipath") - async def resolve_entity_set_async( + @traced(name="entity_get_record", run_type="uipath") + def get_record( self, - items: list[DataFabricEntityItem], - ) -> EntitySetResolution: - """Resolve an agent entity set, applying resource overwrites.""" + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Fetch a single entity record by its id. 
- async def _resolve_folder_path(folder_path: str) -> Optional[str]: - if self._folders_service is None: - return None - return await self._folders_service.retrieve_key_async( - folder_path=folder_path - ) + Args: + entity_key (str): The unique key/identifier of the entity. + record_id (str): The unique identifier of the record to fetch. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). - plan = await create_resolution_plan_async( - items, - _resource_overwrites.get() or {}, - _resolve_folder_path, - ) - entities = await fetch_resolved_entities_async( - plan, - self.retrieve_async, - self.retrieve_by_name_async, - logger, - ) - resolution_service: EntitiesService = build_resolution_service( # type: ignore[assignment] - config=self._config, - execution_context=self._execution_context, - folders_service=self._folders_service, - plan=plan, - service_factory=EntitiesService, - ) - return EntitySetResolution( - entities=entities, - entities_service=resolution_service, - ) + Returns: + EntityRecord: The record, with optional expanded relationships. 
- def _query_entities_for_records( - self, - sql_query: str, - ) -> List[Dict[str, Any]]: - self._validate_sql_query(sql_query) - routing_context = self._routing_strategy.resolve() - spec = self._query_entity_records_spec(sql_query, routing_context) - response = self.request(spec.method, spec.endpoint, json=spec.json) - return response.json().get("results", []) - - async def _query_entities_for_records_async( - self, - sql_query: str, - ) -> List[Dict[str, Any]]: - self._validate_sql_query(sql_query) - routing_context = await self._routing_strategy.resolve_async() - spec = self._query_entity_records_spec(sql_query, routing_context) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) - return response.json().get("results", []) + Examples: + Basic usage:: - @traced(name="entity_record_insert_batch", run_type="uipath") - def insert_records( + record = entities_service.get_record("Customers", "rec-1") + print(record.id, record.name) + + With FK expansion:: + + # Inline the related Company record on the returned Customer + record = entities_service.get_record( + "Customers", "rec-1", expansion_level=1 + ) + """ + return self._data.get_record( + entity_key, record_id, expansion_level=expansion_level + ) + + @traced(name="entity_get_record", run_type="uipath") + async def get_record_async( self, entity_key: str, - records: List[Any], - schema: Optional[Type[Any]] = None, - ) -> EntityRecordsBatchResponse: - """Insert multiple records into an entity in a single batch operation. + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Asynchronously fetch a single entity record by its id. Args: entity_key (str): The unique key/identifier of the entity. - records (List[Any]): List of records to insert. Each record should be an object - with attributes matching the entity's field names. - schema (Optional[Type[Any]]): Optional schema class for validation. 
When provided, - validates that each record in the response matches the schema structure. + record_id (str): The unique identifier of the record to fetch. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). Returns: - EntityRecordsBatchResponse: Response containing successful and failed record operations. - - success_records: List of successfully inserted EntityRecord objects - - failure_records: List of EntityRecord objects that failed to insert + EntityRecord: The record. Examples: - Insert records without schema:: + Basic usage:: - class Customer: - def __init__(self, name, email, age): - self.name = name - self.email = email + record = await entities_service.get_record_async("Customers", "rec-1") + print(record.id, record.name) + """ + return await self._data.get_record_async( + entity_key, record_id, expansion_level=expansion_level + ) + + @traced(name="entity_update_record", run_type="uipath") + def update_record( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Update a single record by id and return the updated row. + + Note: + Unlike :meth:`update_records` (batch), this single-record endpoint + fires Data Fabric trigger events. Use this method when triggers + attached to the entity must run. + + Args: + entity_key (str): The unique key/identifier of the entity. + record_id (str): The unique identifier of the record to update. + data (Any): Fields to update — a dict, a Pydantic model, or any + object exposing ``__dict__``. Fields explicitly set to + ``None`` are sent through; unset fields are omitted. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). + + Returns: + EntityRecord: The updated record. 
+ + Examples: + Partial update from a dict:: + + record = entities_service.update_record( + "Customers", + "rec-1", + {"email": "alice.new@example.com"}, + ) + + Clear a field by passing an explicit ``None``:: + + # Note: unset fields are omitted; explicit None values are sent. + record = entities_service.update_record( + "Customers", + "rec-1", + {"middle_name": None}, + ) + """ + return self._data.update_record( + entity_key, record_id, data, expansion_level=expansion_level + ) + + @traced(name="entity_update_record", run_type="uipath") + async def update_record_async( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Asynchronously update a single record by id. + + Note: + Unlike :meth:`update_records_async` (batch), this single-record + endpoint fires Data Fabric trigger events. + + Args: + entity_key (str): The unique key/identifier of the entity. + record_id (str): The unique identifier of the record to update. + data (Any): Fields to update — a dict, a Pydantic model, or any + object exposing ``__dict__``. + expansion_level (Optional[int]): Depth of foreign-key expansion. + + Returns: + EntityRecord: The updated record. + + Examples: + Partial update:: + + record = await entities_service.update_record_async( + "Customers", + "rec-1", + {"email": "alice.new@example.com"}, + ) + """ + return await self._data.update_record_async( + entity_key, record_id, data, expansion_level=expansion_level + ) + + @traced(name="entity_delete_record", run_type="uipath") + def delete_record(self, entity_key: str, record_id: str) -> None: + """Delete a single record by id. + + Note: + Unlike :meth:`delete_records` (batch), this single-record endpoint + fires Data Fabric trigger events. Use this method when triggers + attached to the entity must run on delete. + + Args: + entity_key (str): The unique key/identifier of the entity. + record_id (str): The unique identifier of the record to delete. 
+ + Examples: + Delete by id:: + + entities_service.delete_record("Customers", "rec-1") + """ + self._data.delete_record(entity_key, record_id) + + @traced(name="entity_delete_record", run_type="uipath") + async def delete_record_async(self, entity_key: str, record_id: str) -> None: + """Asynchronously delete a single record by id. + + Note: + Unlike :meth:`delete_records_async` (batch), this single-record + endpoint fires Data Fabric trigger events. + + Args: + entity_key (str): The unique key/identifier of the entity. + record_id (str): The unique identifier of the record to delete. + + Examples: + Delete by id:: + + await entities_service.delete_record_async("Customers", "rec-1") + """ + await self._data.delete_record_async(entity_key, record_id) + + @traced(name="entity_record_insert_batch", run_type="uipath") + def insert_records( + self, + entity_key: str, + records: List[Any], + schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Insert multiple records into an entity in a single batch operation. + + Args: + entity_key (str): The unique key/identifier of the entity. + records (List[Any]): List of records to insert. Each record may be + a dict, a Pydantic model, an :class:`EntityRecord`, or any + object exposing ``__dict__``. + schema (Optional[Type[Any]]): Optional schema class for validation. When provided, + validates that each record in the response matches the schema structure. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. + + Returns: + EntityRecordsBatchResponse: Response containing successful and failed record operations. 
+ - success_records: List of successfully inserted :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors + + Examples: + Insert records without schema:: + + class Customer: + def __init__(self, name, email, age): + self.name = name + self.email = email self.age = age customers = [ @@ -766,6 +1152,15 @@ def __init__(self, name, email, age): print(f"Inserted: {len(response.success_records)}") print(f"Failed: {len(response.failure_records)}") + Insert with FK expansion and fail-fast:: + + response = entities_service.insert_records( + "Orders", + [{"product_id": "p-1", "qty": 3}, {"product_id": "p-2", "qty": 1}], + expansion_level=1, # inline the related Product on each response record + fail_on_first=True, # abort the batch at the first error + ) + Insert with schema validation:: class CustomerSchema: @@ -791,10 +1186,13 @@ def __init__(self, name, email, age): for record in response.success_records: print(f"Inserted: {record.name} (ID: {record.id})") """ - spec = self._insert_batch_spec(entity_key, records) - response = self.request(spec.method, spec.endpoint, json=spec.json) - - return self.validate_entity_batch(response, schema) + return self._data.insert_records( + entity_key, + records, + schema=schema, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) @traced(name="entity_record_insert_batch", run_type="uipath") async def insert_records_async( @@ -802,20 +1200,29 @@ async def insert_records_async( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously insert multiple records into an entity in a single batch operation. Args: entity_key (str): The unique key/identifier of the entity. - records (List[Any]): List of records to insert. Each record should be an object - with attributes matching the entity's field names. 
+ records (List[Any]): List of records to insert. Each record may be + a dict, a Pydantic model, an :class:`EntityRecord`, or any + object exposing ``__dict__``. schema (Optional[Type[Any]]): Optional schema class for validation. When provided, validates that each record in the response matches the schema structure. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. Returns: EntityRecordsBatchResponse: Response containing successful and failed record operations. - - success_records: List of successfully inserted EntityRecord objects - - failure_records: List of EntityRecord objects that failed to insert + - success_records: List of successfully inserted :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors Examples: Insert records without schema:: @@ -864,10 +1271,13 @@ def __init__(self, name, email, age): for record in response.success_records: print(f"Inserted: {record.name} (ID: {record.id})") """ - spec = self._insert_batch_spec(entity_key, records) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) - - return self.validate_entity_batch(response, schema) + return await self._data.insert_records_async( + entity_key, + records, + schema=schema, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) @traced(name="entity_record_update_batch", run_type="uipath") def update_records( @@ -875,20 +1285,30 @@ def update_records( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Update multiple records in an entity in a single batch operation. 
Args: entity_key (str): The unique key/identifier of the entity. - records (List[Any]): List of records to update. Each record must have an 'Id' field - and should be a Pydantic model with `model_dump()` method or similar object. + records (List[Any]): List of records to update. Each record must + include its ``Id`` field. A record may be a dict, a Pydantic + model, an :class:`EntityRecord`, or any object exposing + ``__dict__``. schema (Optional[Type[Any]]): Optional schema class for validation. When provided, validates that each record in the request and response matches the schema structure. + expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. Returns: EntityRecordsBatchResponse: Response containing successful and failed record operations. 
- - success_records: List of successfully updated EntityRecord objects - - failure_records: List of EntityRecord objects that failed to update + - success_records: List of successfully updated :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors Examples: Update records:: @@ -937,15 +1357,13 @@ class CustomerSchema: for record in response.success_records: print(f"Updated: {record.name}") """ - valid_records = [ - EntityRecord.from_data(data=record.model_dump(by_alias=True), model=schema) - for record in records - ] - - spec = self._update_batch_spec(entity_key, valid_records) - response = self.request(spec.method, spec.endpoint, json=spec.json) - - return self.validate_entity_batch(response, schema) + return self._data.update_records( + entity_key, + records, + schema=schema, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) @traced(name="entity_record_update_batch", run_type="uipath") async def update_records_async( @@ -953,20 +1371,30 @@ async def update_records_async( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously update multiple records in an entity in a single batch operation. Args: entity_key (str): The unique key/identifier of the entity. - records (List[Any]): List of records to update. Each record must have an 'Id' field - and should be a Pydantic model with `model_dump()` method or similar object. + records (List[Any]): List of records to update. Each record must + include its ``Id`` field. A record may be a dict, a Pydantic + model, an :class:`EntityRecord`, or any object exposing + ``__dict__``. schema (Optional[Type[Any]]): Optional schema class for validation. When provided, validates that each record in the request and response matches the schema structure. 
+ expansion_level (Optional[int]): Depth of foreign-key expansion in + the response (``0`` means no expansion). + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. Returns: EntityRecordsBatchResponse: Response containing successful and failed record operations. - - success_records: List of successfully updated EntityRecord objects - - failure_records: List of EntityRecord objects that failed to update + - success_records: List of successfully updated :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors Examples: Update records:: @@ -1015,32 +1443,35 @@ class CustomerSchema: for record in response.success_records: print(f"Updated: {record.name}") """ - valid_records = [ - EntityRecord.from_data(data=record.model_dump(by_alias=True), model=schema) - for record in records - ] - - spec = self._update_batch_spec(entity_key, valid_records) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) - - return self.validate_entity_batch(response, schema) + return await self._data.update_records_async( + entity_key, + records, + schema=schema, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) @traced(name="entity_record_delete_batch", run_type="uipath") def delete_records( self, entity_key: str, record_ids: List[str], + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Delete multiple records from an entity in a single batch operation. Args: entity_key (str): The unique key/identifier of the entity. record_ids (List[str]): List of record IDs (GUIDs) to delete. + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. 
When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. Returns: EntityRecordsBatchResponse: Response containing successful and failed record operations. - - success_records: List of successfully deleted EntityRecord objects - - failure_records: List of EntityRecord objects that failed to delete + - success_records: List of successfully deleted :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors Examples: Delete specific records by ID:: @@ -1077,31 +1508,31 @@ def delete_records( ) print(f"Deleted {len(response.success_records)} inactive records") """ - spec = self._delete_batch_spec(entity_key, record_ids) - response = self.request(spec.method, spec.endpoint, json=spec.json) - - delete_records_response = EntityRecordsBatchResponse.model_validate( - response.json() + return self._data.delete_records( + entity_key, record_ids, fail_on_first=fail_on_first ) - return delete_records_response - @traced(name="entity_record_delete_batch", run_type="uipath") async def delete_records_async( self, entity_key: str, record_ids: List[str], + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously delete multiple records from an entity in a single batch operation. Args: entity_key (str): The unique key/identifier of the entity. record_ids (List[str]): List of record IDs (GUIDs) to delete. + fail_on_first (Optional[bool]): When ``True``, stop the batch on + the first per-record failure. When ``False`` (default), all + records are attempted and the response lists both + ``success_records`` and ``failure_records``. Returns: EntityRecordsBatchResponse: Response containing successful and failed record operations. 
- - success_records: List of successfully deleted EntityRecord objects - - failure_records: List of EntityRecord objects that failed to delete + - success_records: List of successfully deleted :class:`EntityRecord` objects + - failure_records: List of :class:`FailureRecord` describing per-record errors Examples: Delete specific records by ID:: @@ -1138,351 +1569,655 @@ async def delete_records_async( ) print(f"Deleted {len(response.success_records)} inactive records") """ - spec = self._delete_batch_spec(entity_key, record_ids) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) - - delete_records_response = EntityRecordsBatchResponse.model_validate( - response.json() + return await self._data.delete_records_async( + entity_key, record_ids, fail_on_first=fail_on_first ) - return delete_records_response - - def validate_entity_batch( + @traced(name="entity_query", run_type="uipath") + def query( self, - batch_response: Response, - schema: Optional[Type[Any]] = None, - ) -> EntityRecordsBatchResponse: - # Validate the response format - insert_records_response = EntityRecordsBatchResponse.model_validate( - batch_response.json() - ) + entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + expansions: Optional[List[Any]] = None, + expansion_level: Optional[int] = None, + aggregates: Optional[List[EntityAggregate]] = None, + group_by: Optional[List[str]] = None, + joins: Optional[List[EntityJoin]] = None, + binnings: Optional[List[EntityBinning]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> EntityQueryRecordsResponse: + """Query records with structured filters, sorting, expansion, joins, and aggregates. 
- # Validate individual records - validated_successful_records = [ - EntityRecord.from_data( - data=successful_record.model_dump(by_alias=True), model=schema - ) - for successful_record in insert_records_response.success_records - ] + Routes to the V2 endpoint when ``binnings`` is provided (numeric/date + binning is gated by the ``enable-binning-on-query`` feature flag on + the backend). - validated_failed_records = [ - EntityRecord.from_data( - data=failed_record.model_dump(by_alias=True), model=schema - ) - for failed_record in insert_records_response.failure_records - ] + Args: + entity_key (str): The unique key/identifier of the entity. + filter_group (Optional[EntityQueryFilterGroup]): Nested filter + conditions combined with AND/OR. + sort_options (Optional[List[EntityQuerySortOption]]): Sort fields + and direction. + selected_fields (Optional[List[str]]): Column projection — field + names to include; omit to return all fields. + expansions (Optional[List[Any]]): Foreign-key relationships to + expand inline on each result record. + expansion_level (Optional[int]): Depth of expansion (sent as a + URL query param). + aggregates (Optional[List[EntityAggregate]]): Aggregate + expressions (``COUNT`` / ``SUM`` / ``AVG`` / ``MIN`` / + ``MAX``). Maximum 5 per query. + group_by (Optional[List[str]]): Fields to group aggregate results + by. Maximum 5; required when both ``aggregates`` and + ``selected_fields`` are supplied. + joins (Optional[List[EntityJoin]]): Cross-entity joins. Maximum + 3, all of the same type. + binnings (Optional[List[EntityBinning]]): Bucket numeric or date + group-by fields. Each entry's field must also appear in + ``group_by``. + start (Optional[int]): Records to skip (pagination offset). + limit (Optional[int]): Maximum number of records to return. 
- return EntityRecordsBatchResponse( - success_records=validated_successful_records, - failure_records=validated_failed_records, - ) + Returns: + EntityQueryRecordsResponse: A response with ``items``, + ``total_count``, ``has_next_page``, and ``next_cursor``. + ``next_cursor`` is populated only when the backend returns + one; otherwise paginate by passing the next ``start``. - def _retrieve_spec( - self, - entity_key: str, - ) -> RequestSpec: - return RequestSpec( - method="GET", - endpoint=Endpoint(f"datafabric_/api/Entity/{entity_key}"), - ) + Examples: + Filter + sort + projection:: + + from uipath.platform.entities import ( + EntityQueryFilter, + EntityQueryFilterGroup, + EntityQuerySortOption, + LogicalOperator, + QueryFilterOperator, + ) - def _retrieve_by_name_spec( - self, - entity_name: str, - ) -> RequestSpec: - return RequestSpec( - method="GET", - endpoint=Endpoint(f"datafabric_/api/Entity/{entity_name}/metadata"), - ) + result = entities_service.query( + "Customers", + filter_group=EntityQueryFilterGroup( + logical_operator=LogicalOperator.And, + query_filters=[ + EntityQueryFilter( + field_name="status", + operator=QueryFilterOperator.Equals, + value="active", + ) + ], + ), + sort_options=[ + EntityQuerySortOption(field_name="created_at", is_descending=True) + ], + selected_fields=["Id", "name", "email"], + start=0, + limit=50, + ) + print(f"Found {result.total_count} customers") + + Aggregates and group-by (counts per status):: - @staticmethod - def _folder_key_headers(folder_key: Optional[str]) -> dict[str, str]: - if folder_key: - return {HEADER_FOLDER_KEY: folder_key} - return {} + from uipath.platform.entities import ( + EntityAggregate, + EntityAggregateFunction, + ) - def _list_entities_spec(self) -> RequestSpec: - return RequestSpec( - method="GET", - endpoint=Endpoint("datafabric_/api/Entity"), + result = entities_service.query( + "Customers", + selected_fields=["status"], + group_by=["status"], + aggregates=[ + EntityAggregate( + 
function=EntityAggregateFunction.Count, + field="Id", + alias="total", + ) + ], + ) + for row in result.items: + print(row.status, row.total) + """ + return self._data.query( + entity_key, + filter_group=filter_group, + sort_options=sort_options, + selected_fields=selected_fields, + expansions=expansions, + expansion_level=expansion_level, + aggregates=aggregates, + group_by=group_by, + joins=joins, + binnings=binnings, + start=start, + limit=limit, ) - def _list_records_spec( + @traced(name="entity_query", run_type="uipath") + async def query_async( self, entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + expansions: Optional[List[Any]] = None, + expansion_level: Optional[int] = None, + aggregates: Optional[List[EntityAggregate]] = None, + group_by: Optional[List[str]] = None, + joins: Optional[List[EntityJoin]] = None, + binnings: Optional[List[EntityBinning]] = None, start: Optional[int] = None, limit: Optional[int] = None, - ) -> RequestSpec: - params: dict[str, Any] = {} - if start is not None: - params["start"] = start - if limit is not None: - params["limit"] = limit - return RequestSpec( - method="GET", - endpoint=Endpoint( - f"datafabric_/api/EntityService/entity/{entity_key}/read" - ), - params=params, + ) -> EntityQueryRecordsResponse: + """Asynchronously query records with structured filters, sorting, expansion, joins, and aggregates. + + Routes to the V2 endpoint when ``binnings`` is provided (numeric/date + binning is gated by the ``enable-binning-on-query`` feature flag on + the backend). + + Args: + entity_key (str): The unique key/identifier of the entity. + filter_group (Optional[EntityQueryFilterGroup]): Nested filter + conditions combined with AND/OR. + sort_options (Optional[List[EntityQuerySortOption]]): Sort fields + and direction. 
+ selected_fields (Optional[List[str]]): Column projection — field + names to include; omit to return all fields. + expansions (Optional[List[Any]]): Foreign-key relationships to + expand inline on each result record. + expansion_level (Optional[int]): Depth of expansion. + aggregates (Optional[List[EntityAggregate]]): Aggregate + expressions. Maximum 5 per query. + group_by (Optional[List[str]]): Fields to group aggregate results + by. Maximum 5; required when both ``aggregates`` and + ``selected_fields`` are supplied. + joins (Optional[List[EntityJoin]]): Cross-entity joins. Maximum + 3, all of the same type. + binnings (Optional[List[EntityBinning]]): Bucket numeric or date + group-by fields. + start (Optional[int]): Records to skip (pagination offset). + limit (Optional[int]): Maximum number of records to return. + + Returns: + EntityQueryRecordsResponse: A response with ``items``, + ``total_count``, ``has_next_page``, and ``next_cursor``. + + Examples: + Filter + sort + pagination:: + + from uipath.platform.entities import ( + EntityQueryFilter, + EntityQueryFilterGroup, + QueryFilterOperator, + ) + + result = await entities_service.query_async( + "Customers", + filter_group=EntityQueryFilterGroup( + query_filters=[ + EntityQueryFilter( + field_name="status", + operator=QueryFilterOperator.Equals, + value="active", + ) + ], + ), + start=0, + limit=25, + ) + print(f"{len(result.items)} of {result.total_count} customers") + """ + return await self._data.query_async( + entity_key, + filter_group=filter_group, + sort_options=sort_options, + selected_fields=selected_fields, + expansions=expansions, + expansion_level=expansion_level, + aggregates=aggregates, + group_by=group_by, + joins=joins, + binnings=binnings, + start=start, + limit=limit, ) - def _query_entity_records_spec( + @traced(name="entity_query_records", run_type="uipath") + def query_entity_records(self, sql_query: str) -> List[Dict[str, Any]]: + """Query entity records using a validated SQL query. 
+ + PREVIEW: This method is in preview and may change in future releases. + + Args: + sql_query (str): A SQL SELECT query to execute against Data Service entities. + Only SELECT statements are allowed. Queries without WHERE must include + a LIMIT clause. Subqueries and multi-statement queries are not permitted. + + Notes: + A routing context is always derived from the configured ``folders_map`` + when present and included in the request body. + + Returns: + List[Dict[str, Any]]: A list of result records as dictionaries. + + Raises: + ValueError: If the SQL query fails validation (e.g., non-SELECT, missing + WHERE/LIMIT, forbidden keywords, subqueries). + """ + return self._data.query_entity_records(sql_query) + + @traced(name="entity_query_records", run_type="uipath") + async def query_entity_records_async(self, sql_query: str) -> List[Dict[str, Any]]: + """Asynchronously query entity records using a validated SQL query. + + PREVIEW: This method is in preview and may change in future releases. + + Args: + sql_query (str): A SQL SELECT query to execute against Data Service entities. + Only SELECT statements are allowed. Queries without WHERE must include + a LIMIT clause. Subqueries and multi-statement queries are not permitted. + + Notes: + A routing context is always derived from the configured ``folders_map`` + when present and included in the request body. + + Returns: + List[Dict[str, Any]]: A list of result records as dictionaries. + + Raises: + ValueError: If the SQL query fails validation (e.g., non-SELECT, missing + WHERE/LIMIT, forbidden keywords, subqueries). 
+ """ + return await self._data.query_entity_records_async(sql_query) + + @traced(name="entity_upload_attachment", run_type="uipath") + def upload_attachment( self, - sql_query: str, - routing_context: Optional[QueryRoutingOverrideContext] = None, - ) -> RequestSpec: - body: Dict[str, Any] = {"query": sql_query} - if routing_context: - body["routingContext"] = routing_context.model_dump( - by_alias=True, exclude_none=True - ) - return RequestSpec( - method="POST", - endpoint=Endpoint("datafabric_/api/v1/query/execute"), - json=body, - ) + entity_id: str, + record_id: str, + field_name: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Upload a file attachment to a File-type field on a record. + + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). - def _insert_batch_spec(self, entity_key: str, records: List[Any]) -> RequestSpec: - return RequestSpec( - method="POST", - endpoint=Endpoint( - f"datafabric_/api/EntityService/entity/{entity_key}/insert-batch" - ), - json=[record.__dict__ for record in records], - ) + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record whose + attachment field is being set. + field_name (str): Name of the File-type field on the entity. + file (Optional[FileContent]): Raw bytes (``bytes`` / + ``bytearray`` / ``memoryview``) of the file to upload. + Mutually exclusive with ``file_path``. + file_path (Optional[str]): Path to a local file to upload. + Mutually exclusive with ``file``. + expansion_level (Optional[int]): Optional FK expansion depth in + the response (``0`` means no expansion). 
- def _update_batch_spec( - self, entity_key: str, records: List[EntityRecord] - ) -> RequestSpec: - return RequestSpec( - method="POST", - endpoint=Endpoint( - f"datafabric_/api/EntityService/entity/{entity_key}/update-batch" - ), - json=[record.model_dump(by_alias=True) for record in records], + Returns: + Dict[str, Any]: The decoded JSON response (typically the updated + record), or an empty dict when the response has no body. + + Examples: + Upload from raw bytes:: + + with open("contract.pdf", "rb") as f: + data = f.read() + entities_service.upload_attachment( + "Customers", "rec-1", "Contract", file=data + ) + + Upload from a path on disk:: + + entities_service.upload_attachment( + "Customers", "rec-1", "Contract", file_path="./contract.pdf" + ) + """ + return self._data.upload_attachment( + entity_id, + record_id, + field_name, + file=file, + file_path=file_path, + expansion_level=expansion_level, ) - def _delete_batch_spec(self, entity_key: str, record_ids: List[str]) -> RequestSpec: - return RequestSpec( - method="POST", - endpoint=Endpoint( - f"datafabric_/api/EntityService/entity/{entity_key}/delete-batch" - ), - json=record_ids, + @traced(name="entity_upload_attachment", run_type="uipath") + async def upload_attachment_async( + self, + entity_id: str, + record_id: str, + field_name: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Asynchronously upload a file attachment to a File-type field on a record. + + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). + + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record whose + attachment field is being set. + field_name (str): Name of the File-type field on the entity. + file (Optional[FileContent]): Raw bytes of the file to upload. + Mutually exclusive with ``file_path``. 
+ file_path (Optional[str]): Path to a local file to upload. + Mutually exclusive with ``file``. + expansion_level (Optional[int]): Optional FK expansion depth in + the response. + + Returns: + Dict[str, Any]: The decoded JSON response. + + Examples: + Upload from a path on disk:: + + await entities_service.upload_attachment_async( + "Customers", "rec-1", "Contract", file_path="./contract.pdf" + ) + """ + return await self._data.upload_attachment_async( + entity_id, + record_id, + field_name, + file=file, + file_path=file_path, + expansion_level=expansion_level, ) - def _list_choicesets_spec(self) -> RequestSpec: - return RequestSpec( - method="GET", - endpoint=Endpoint("datafabric_/api/Entity/choiceset"), + @traced(name="entity_download_attachment", run_type="uipath") + def download_attachment( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Download a file attached to a record and return its raw bytes. + + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record containing + the attachment. + field_name (str): Name of the File-type field on the entity. + + Returns: + bytes: The raw file content. + + Examples: + Save the downloaded bytes to disk:: + + content = entities_service.download_attachment( + "Customers", "rec-1", "Contract" + ) + with open("downloaded.pdf", "wb") as f: + f.write(content) + """ + return self._data.download_attachment(entity_id, record_id, field_name) + + @traced(name="entity_download_attachment", run_type="uipath") + async def download_attachment_async( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Asynchronously download a file attached to a record. + + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record containing + the attachment. + field_name (str): Name of the File-type field on the entity. + + Returns: + bytes: The raw file content. 
+ + Examples: + Save the downloaded bytes to disk:: + + content = await entities_service.download_attachment_async( + "Customers", "rec-1", "Contract" + ) + with open("downloaded.pdf", "wb") as f: + f.write(content) + """ + return await self._data.download_attachment_async( + entity_id, record_id, field_name ) - def _get_choiceset_values_spec( + @traced(name="entity_delete_attachment", run_type="uipath") + def delete_attachment( self, - choiceset_id: str, - start: int | None = None, - limit: int | None = None, - ) -> RequestSpec: - params: dict[str, Any] = {} - if start is not None: - params["start"] = start - if limit is not None: - params["limit"] = limit - return RequestSpec( - method="POST", - endpoint=Endpoint( - f"datafabric_/api/EntityService/entity/{choiceset_id}/query_expansion" - ), - params=params, - json={}, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Remove the file attached to a File-type field on a record. + + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record whose + attachment is being removed. + field_name (str): Name of the File-type field on the entity. + expansion_level (Optional[int]): Optional FK expansion depth in + the response (``0`` means no expansion). + + Returns: + Dict[str, Any]: The decoded JSON response (typically the updated + record), or an empty dict when the response has no body. 
+ + Examples: + Clear an attachment:: + + entities_service.delete_attachment( + "Customers", "rec-1", "Contract" + ) + """ + return self._data.delete_attachment( + entity_id, record_id, field_name, expansion_level=expansion_level ) - def _validate_sql_query(self, sql_query: str) -> None: - query = sql_query.strip().rstrip(";").strip() - if not query: - raise ValueError("SQL query cannot be empty.") + @traced(name="entity_delete_attachment", run_type="uipath") + async def delete_attachment_async( + self, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Asynchronously remove the file attached to a File-type field. - statements = sqlparse.parse(query) - if len(statements) != 1 or not statements[0].tokens: - raise ValueError("Only a single SELECT statement is allowed.") + Args: + entity_id (str): The unique identifier of the entity. + record_id (str): The unique identifier of the record whose + attachment is being removed. + field_name (str): Name of the File-type field on the entity. + expansion_level (Optional[int]): Optional FK expansion depth. - stmt = statements[0] - stmt_type = stmt.get_type() + Returns: + Dict[str, Any]: The decoded JSON response. - if stmt_type != "SELECT": - raise ValueError("Only SELECT statements are allowed.") + Examples: + Clear an attachment:: + + await entities_service.delete_attachment_async( + "Customers", "rec-1", "Contract" + ) + """ + return await self._data.delete_attachment_async( + entity_id, record_id, field_name, expansion_level=expansion_level + ) - keywords = set() - for token in stmt.flatten(): - if token.ttype in Keyword: - keywords.add(token.normalized) + @traced(name="entity_import_records", run_type="uipath") + def import_records( + self, + entity_id: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Bulk-import records into an entity from a CSV file. 
+ + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). - for kw in _FORBIDDEN_DML: - if kw in keywords: - raise ValueError(f"SQL keyword '{kw}' is not allowed.") + Args: + entity_id (str): The unique identifier of the entity. + file (Optional[FileContent]): Raw bytes of a CSV file. Mutually + exclusive with ``file_path``. + file_path (Optional[str]): Path to a local CSV file. Mutually + exclusive with ``file``. + + Returns: + EntityImportRecordsResponse: Reports the total rows in the file, + the number successfully inserted, and an optional + ``error_file_link`` pointing to a CSV listing rows that + failed validation. - for kw in _FORBIDDEN_DDL: - if kw in keywords: - raise ValueError(f"SQL keyword '{kw}' is not allowed.") + Examples: + Import from a path on disk:: - for kw in _DISALLOWED_KEYWORDS: - if kw in keywords: - raise ValueError( - f"SQL construct '{kw}' is not allowed in entity queries." + result = entities_service.import_records( + "Customers", file_path="./customers.csv" + ) + print( + f"Inserted {result.inserted_records} of " + f"{result.total_records} rows" ) + if result.error_file_link: + print(f"Errors: {result.error_file_link}") + """ + return self._data.import_records(entity_id, file=file, file_path=file_path) - if self._has_subquery(stmt): - raise ValueError("Subqueries are not allowed.") + @traced(name="entity_import_records", run_type="uipath") + async def import_records_async( + self, + entity_id: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Asynchronously bulk-import records into an entity from a CSV file. - has_where = any(isinstance(t, Where) for t in stmt.tokens) - has_limit = "LIMIT" in keywords - has_from = "FROM" in keywords + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). 
- if not has_from: - raise ValueError("Queries must include a FROM clause.") + Args: + entity_id (str): The unique identifier of the entity. + file (Optional[FileContent]): Raw bytes of a CSV file. + file_path (Optional[str]): Path to a local CSV file. - projection = self._projection_tokens(stmt) + Returns: + EntityImportRecordsResponse: Reports the total, inserted, and + ``error_file_link`` for failed rows. - if self._projection_has_count_star(projection): - raise ValueError( - "COUNT(*) is not supported. Use COUNT(column_name) instead." - ) + Examples: + Import from a path on disk:: + + result = await entities_service.import_records_async( + "Customers", file_path="./customers.csv" + ) + print( + f"Inserted {result.inserted_records} of " + f"{result.total_records} rows" + ) + """ + return await self._data.import_records_async( + entity_id, file=file, file_path=file_path + ) + + # ------------------------------------------------------------------ + # Public helper retained for backward compatibility — tests call this + # ------------------------------------------------------------------ + + def validate_entity_batch( + self, + batch_response: Response, + schema: Optional[Type[Any]] = None, + ) -> EntityRecordsBatchResponse: + """Parse a batch response, optionally validating success records against ``schema``. + + Failure records are returned as :class:`FailureRecord` instances and + are not validated against the user schema. 
+ """ + return self._data.validate_entity_batch(batch_response, schema=schema) - has_aggregate = self._projection_has_aggregate(projection) + # ------------------------------------------------------------------ + # Cross-cutting — entity-set resolution for agent overrides + # ------------------------------------------------------------------ + + @traced(name="resolve_entity_set", run_type="uipath") + def resolve_entity_set( + self, + items: List[DataFabricEntityItem], + ) -> EntitySetResolution: + """Resolve an agent entity set, applying resource overwrites.""" + plan = create_resolution_plan( + items, + _resource_overwrites.get() or {}, + lambda folder_path: ( + self._folders_service.retrieve_key(folder_path=folder_path) + if self._folders_service is not None + else None + ), + ) + entities = fetch_resolved_entities( + plan, + self.retrieve, + self.retrieve_by_name, + logger, + ) + resolution_service: EntitiesService = build_resolution_service( # type: ignore[assignment] + config=self._config, + execution_context=self._execution_context, + folders_service=self._folders_service, + plan=plan, + service_factory=EntitiesService, + ) + return EntitySetResolution( + entities=entities, + entities_service=resolution_service, + ) - if not has_where and not has_limit and not has_aggregate: - raise ValueError("Queries without WHERE must include a LIMIT clause.") + @traced(name="resolve_entity_set", run_type="uipath") + async def resolve_entity_set_async( + self, + items: List[DataFabricEntityItem], + ) -> EntitySetResolution: + """Resolve an agent entity set, applying resource overwrites.""" - has_bare_wildcard = self._projection_has_bare_wildcard(projection) - if has_bare_wildcard: - raise ValueError("SELECT * is not allowed. Specify column names instead.") - if not has_where and self._projection_column_count(projection) > 4: - raise ValueError( - "Selecting more than 4 columns without filtering is not allowed." 
+ async def _resolve_folder_path(folder_path: str) -> Optional[str]: + if self._folders_service is None: + return None + return await self._folders_service.retrieve_key_async( + folder_path=folder_path ) - @staticmethod - def _projection_has_aggregate( - projection: list[sqlparse.sql.Token], - ) -> bool: - """Check whether the projection contains an aggregate function call.""" - - def _has_agg(token: sqlparse.sql.Token) -> bool: - if isinstance(token, Function): - return token.get_name().upper() in _AGGREGATE_FUNCTIONS - if isinstance(token, Identifier): - return any(_has_agg(child) for child in token.tokens) - return False - - for node in projection: - if _has_agg(node): - return True - if isinstance(node, IdentifierList): - if any(_has_agg(child) for child in node.tokens): - return True - return False - - @staticmethod - def _projection_has_count_star( - projection: list[sqlparse.sql.Token], - ) -> bool: - """Check whether projection contains COUNT(*).""" - - def _is_count_star(func: Function) -> bool: - if func.get_name().upper() != "COUNT": - return False - return any(t.ttype is Wildcard for t in func.flatten()) - - def _has_count_star(token: sqlparse.sql.Token) -> bool: - if isinstance(token, Function): - return _is_count_star(token) - if isinstance(token, Identifier): - return any(_has_count_star(child) for child in token.tokens) - return False - - for node in projection: - if _has_count_star(node): - return True - if isinstance(node, IdentifierList): - if any(_has_count_star(child) for child in node.tokens): - return True - return False - - @staticmethod - def _projection_has_bare_wildcard( - projection: list[sqlparse.sql.Token], - ) -> bool: - """Check for a bare ``*`` or qualified ``table.*`` outside a function.""" - - def _identifier_has_wildcard(ident: Identifier) -> bool: - return any(t.ttype is Wildcard for t in ident.tokens) - - for node in projection: - if node.ttype is Wildcard: - return True - if isinstance(node, Identifier) and 
_identifier_has_wildcard(node): - return True - if isinstance(node, IdentifierList): - for child in node.tokens: - if child.ttype is Wildcard: - return True - if isinstance(child, Identifier) and _identifier_has_wildcard( - child - ): - return True - return False - - @staticmethod - def _has_subquery(stmt: sqlparse.sql.Statement) -> bool: - """Recursively walk the AST looking for SELECT inside parentheses.""" - - def _walk(token: sqlparse.sql.Token) -> bool: - if isinstance(token, Parenthesis): - for child in token.flatten(): - if child.ttype is DML and child.normalized == "SELECT": - return True - if hasattr(token, "tokens"): - for child in token.tokens: - if _walk(child): - return True - return False - - for token in stmt.tokens: - if _walk(token): - return True - return False - - @staticmethod - def _projection_tokens( - stmt: sqlparse.sql.Statement, - ) -> list[sqlparse.sql.Token]: - """Extract non-flattened AST nodes between the first SELECT and FROM.""" - tokens: list[sqlparse.sql.Token] = [] - collecting = False - for token in stmt.tokens: - if token.ttype is DML and token.normalized == "SELECT": - collecting = True - continue - if token.ttype is Keyword and token.normalized in ("FROM", "INTO"): - break - if token.ttype is Keyword and token.normalized == "DISTINCT": - continue - if collecting and token.ttype is not Whitespace: - tokens.append(token) - return tokens - - @staticmethod - def _projection_column_count( - projection: list[sqlparse.sql.Token], - ) -> int: - for node in projection: - if isinstance(node, IdentifierList): - return len(list(node.get_identifiers())) - if isinstance(node, (Identifier, Function)): - return 1 - if node.ttype is Wildcard: - return 1 - return 0 + plan = await create_resolution_plan_async( + items, + _resource_overwrites.get() or {}, + _resolve_folder_path, + ) + entities = await fetch_resolved_entities_async( + plan, + self.retrieve_async, + self.retrieve_by_name_async, + logger, + ) + resolution_service: EntitiesService = 
build_resolution_service( # type: ignore[assignment] + config=self._config, + execution_context=self._execution_context, + folders_service=self._folders_service, + plan=plan, + service_factory=EntitiesService, + ) + return EntitySetResolution( + entities=entities, + entities_service=resolution_service, + ) # Resolve the forward reference to EntitiesService in EntitySetResolution. diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py new file mode 100644 index 000000000..69fbe38fa --- /dev/null +++ b/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py @@ -0,0 +1,1401 @@ +"""Data-side operations for the Data Fabric entities surface. + +Handles record CRUD (single and batch), structured queries, attachments, +choice-set value lookup, bulk import, and the legacy federated SQL query +escape hatch. Schema definitions are managed by :class:`EntitySchemaService` +and exposed alongside data operations through :class:`EntitiesService`. 
+""" + +import json as json_module +import logging +from contextlib import nullcontext +from pathlib import Path +from typing import Any, Dict, List, Optional, Type, Union + +import sqlparse +from httpx import HTTPStatusError, Response +from pydantic import BaseModel +from sqlparse.sql import Function, Identifier, IdentifierList, Parenthesis, Where +from sqlparse.tokens import DML, Keyword, Whitespace, Wildcard + +from ..common._base_service import BaseService +from ..common._config import UiPathApiConfig +from ..common._execution_context import UiPathExecutionContext +from ..common._models import Endpoint, RequestSpec +from ..errors._enriched_exception import EnrichedException +from ..orchestrator._folder_service import FolderService +from ._entity_resolution import RoutingStrategy, create_routing_strategy +from .entities import ( + ChoiceSetValue, + EntityAggregate, + EntityBinning, + EntityImportRecordsResponse, + EntityJoin, + EntityQueryFilterGroup, + EntityQueryRecordsResponse, + EntityQuerySortOption, + EntityRecord, + EntityRecordsBatchResponse, + EntityRecordsListResponse, + QueryRoutingOverrideContext, +) + +logger = logging.getLogger(__name__) + +FileContent = Union[bytes, bytearray, memoryview] +"""Acceptable raw bytes types for attachment and CSV uploads.""" + +_FORBIDDEN_DML = {"INSERT", "UPDATE", "DELETE", "MERGE", "REPLACE"} +_FORBIDDEN_DDL = {"DROP", "ALTER", "CREATE", "TRUNCATE"} +_DISALLOWED_KEYWORDS = [ + "WITH", + "UNION", + "INTERSECT", + "EXCEPT", + "OVER", + "ROLLUP", + "CUBE", + "GROUPING", + "PARTITION", +] +_AGGREGATE_FUNCTIONS = ("COUNT", "SUM", "AVG", "MIN", "MAX") + + +class EntityDataService(BaseService): + """HTTP service for entity-record and attachment operations. + + Backend target: ``datafabric_/api/EntityService/...`` plus + ``datafabric_/api/Attachment/...`` for file attachments, and + ``datafabric_/api/v1/query/execute`` for legacy SQL queries. + + !!! warning "Preview Feature" + This service is currently experimental. 
Behavior and parameters are + subject to change in future versions. + """ + + def __init__( + self, + config: UiPathApiConfig, + execution_context: UiPathExecutionContext, + folders_service: Optional[FolderService] = None, + routing_strategy: Optional[RoutingStrategy] = None, + folders_map: Optional[Dict[str, str]] = None, + entity_name_overrides: Optional[Dict[str, str]] = None, + routing_context: Optional[QueryRoutingOverrideContext] = None, + ) -> None: + """Initialise the data service. + + Either pass a pre-built ``routing_strategy`` (the facade does this so + both services share one) or supply the inputs and let this service + construct its own. + """ + super().__init__(config=config, execution_context=execution_context) + self._folders_service = folders_service + self._routing_strategy: RoutingStrategy = ( + routing_strategy + if routing_strategy is not None + else create_routing_strategy( + folders_map=folders_map, + effective_entity_names=entity_name_overrides, + routing_context=routing_context, + folders_service=folders_service, + ) + ) + + # ------------------------------------------------------------------ + # Choice-set value lookup + # ------------------------------------------------------------------ + + def get_choiceset_values( + self, + choiceset_id: str, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> List[ChoiceSetValue]: + """Internal implementation; see :meth:`EntitiesService.get_choiceset_values`.""" + spec = self._get_choiceset_values_spec(choiceset_id, start=start, limit=limit) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return self._parse_choiceset_values(response) + + async def get_choiceset_values_async( + self, + choiceset_id: str, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> List[ChoiceSetValue]: + """Async variant of :meth:`get_choiceset_values`.""" + spec = self._get_choiceset_values_spec(choiceset_id, start=start, limit=limit) + 
response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return self._parse_choiceset_values(response) + + # ------------------------------------------------------------------ + # List records (multi-record read with OData filters) + # ------------------------------------------------------------------ + + def list_records( + self, + entity_key: str, + schema: Optional[Type[Any]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: + """Internal implementation; see :meth:`EntitiesService.list_records`.""" + spec = self._list_records_spec( + entity_key, + start=start, + limit=limit, + expansion_level=expansion_level, + filter=filter, + orderby=orderby, + select=select, + expand=expand, + ) + response = self.request(spec.method, spec.endpoint, params=spec.params) + return self._build_records_list_response(response, schema, start, limit) + + async def list_records_async( + self, + entity_key: str, + schema: Optional[Type[Any]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: + """Async variant of :meth:`list_records`.""" + spec = self._list_records_spec( + entity_key, + start=start, + limit=limit, + expansion_level=expansion_level, + filter=filter, + orderby=orderby, + select=select, + expand=expand, + ) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params + ) + return self._build_records_list_response(response, schema, start, limit) + + # ------------------------------------------------------------------ + # Single-record 
operations (fire trigger events; batch versions don't) + # ------------------------------------------------------------------ + + def insert_record( + self, + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Internal implementation; see :meth:`EntitiesService.insert_record`.""" + spec = self._insert_record_spec(entity_key, data, expansion_level) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + async def insert_record_async( + self, + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`insert_record`.""" + spec = self._insert_record_spec(entity_key, data, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + def get_record( + self, + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Fetch a single record by its id.""" + spec = self._get_record_spec(entity_key, record_id, expansion_level) + response = self.request(spec.method, spec.endpoint, params=spec.params) + return EntityRecord.model_validate(response.json()) + + async def get_record_async( + self, + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`get_record`.""" + spec = self._get_record_spec(entity_key, record_id, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params + ) + return EntityRecord.model_validate(response.json()) + + def update_record( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Internal implementation; see :meth:`EntitiesService.update_record`.""" + spec = self._update_record_spec(entity_key, 
record_id, data, expansion_level) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + async def update_record_async( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`update_record`.""" + spec = self._update_record_spec(entity_key, record_id, data, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + def delete_record(self, entity_key: str, record_id: str) -> None: + """Delete a single record by id.""" + spec = self._delete_record_spec(entity_key, record_id) + self.request(spec.method, spec.endpoint) + + async def delete_record_async(self, entity_key: str, record_id: str) -> None: + """Async variant of :meth:`delete_record`.""" + spec = self._delete_record_spec(entity_key, record_id) + await self.request_async(spec.method, spec.endpoint) + + # ------------------------------------------------------------------ + # Batch record operations + # ------------------------------------------------------------------ + + def insert_records( + self, + entity_key: str, + records: List[Any], + schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Internal implementation; see :meth:`EntitiesService.insert_records`.""" + spec = self._insert_batch_spec( + entity_key, + records, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + response = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(response, EntityRecordsBatchResponse): + return response + return self.validate_entity_batch(response, schema) + + async def insert_records_async( 
+ self, + entity_key: str, + records: List[Any], + schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Async variant of :meth:`insert_records`.""" + spec = self._insert_batch_spec( + entity_key, + records, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + + async def _send_batch() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + + result = await self._request_or_extract_batch_async(_send_batch) + if isinstance(result, EntityRecordsBatchResponse): + return result + return self.validate_entity_batch(result, schema) + + def update_records( + self, + entity_key: str, + records: List[Any], + schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Internal implementation; see :meth:`EntitiesService.update_records`.""" + normalized = [self._record_to_dict(record) for record in records] + if schema is not None: + for record in normalized: + EntityRecord.from_data(data=record, model=schema) + + spec = self._update_batch_spec( + entity_key, + normalized, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + response = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(response, EntityRecordsBatchResponse): + return response + return self.validate_entity_batch(response, schema) + + async def update_records_async( + self, + entity_key: str, + records: List[Any], + schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Async variant of :meth:`update_records`.""" + normalized = [self._record_to_dict(record) for record in records] + if schema is not None: + for record 
in normalized: + EntityRecord.from_data(data=record, model=schema) + + spec = self._update_batch_spec( + entity_key, + normalized, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + + async def _send_batch() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + + result = await self._request_or_extract_batch_async(_send_batch) + if isinstance(result, EntityRecordsBatchResponse): + return result + return self.validate_entity_batch(result, schema) + + def delete_records( + self, + entity_key: str, + record_ids: List[str], + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Delete multiple records by id in a single batch.""" + spec = self._delete_batch_spec( + entity_key, record_ids, fail_on_first=fail_on_first + ) + result = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(result, EntityRecordsBatchResponse): + return result + return EntityRecordsBatchResponse.model_validate(result.json()) + + async def delete_records_async( + self, + entity_key: str, + record_ids: List[str], + fail_on_first: Optional[bool] = None, + ) -> EntityRecordsBatchResponse: + """Async variant of :meth:`delete_records`.""" + spec = self._delete_batch_spec( + entity_key, record_ids, fail_on_first=fail_on_first + ) + + async def _send_batch() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + + result = await self._request_or_extract_batch_async(_send_batch) + if isinstance(result, EntityRecordsBatchResponse): + return result + return EntityRecordsBatchResponse.model_validate(result.json()) + + # ------------------------------------------------------------------ + # Structured query (POST /entity/{id}/query) + # ------------------------------------------------------------------ + + def query( + self, + 
def query(
    self,
    entity_key: str,
    filter_group: Optional[EntityQueryFilterGroup] = None,
    sort_options: Optional[List[EntityQuerySortOption]] = None,
    selected_fields: Optional[List[str]] = None,
    expansions: Optional[List[Any]] = None,
    expansion_level: Optional[int] = None,
    aggregates: Optional[List[EntityAggregate]] = None,
    group_by: Optional[List[str]] = None,
    joins: Optional[List[EntityJoin]] = None,
    binnings: Optional[List[EntityBinning]] = None,
    start: Optional[int] = None,
    limit: Optional[int] = None,
) -> EntityQueryRecordsResponse:
    """Internal implementation; see :meth:`EntitiesService.query`.

    All query options are serialized into the JSON body by
    :meth:`_query_spec`; ``expansion_level`` alone travels as a URL query
    parameter. When ``binnings`` is supplied the request is routed to the
    V2 query endpoint. ``start``/``limit`` are also forwarded to the
    response parser so pagination flags can be derived.
    """
    spec = self._query_spec(
        entity_key,
        filter_group=filter_group,
        sort_options=sort_options,
        selected_fields=selected_fields,
        expansions=expansions,
        expansion_level=expansion_level,
        aggregates=aggregates,
        group_by=group_by,
        joins=joins,
        binnings=binnings,
        start=start,
        limit=limit,
    )
    response = self.request(
        spec.method, spec.endpoint, params=spec.params, json=spec.json
    )
    return self._parse_query_response(response, start=start, limit=limit)

async def query_async(
    self,
    entity_key: str,
    filter_group: Optional[EntityQueryFilterGroup] = None,
    sort_options: Optional[List[EntityQuerySortOption]] = None,
    selected_fields: Optional[List[str]] = None,
    expansions: Optional[List[Any]] = None,
    expansion_level: Optional[int] = None,
    aggregates: Optional[List[EntityAggregate]] = None,
    group_by: Optional[List[str]] = None,
    joins: Optional[List[EntityJoin]] = None,
    binnings: Optional[List[EntityBinning]] = None,
    start: Optional[int] = None,
    limit: Optional[int] = None,
) -> EntityQueryRecordsResponse:
    """Async variant of :meth:`query`."""
    spec = self._query_spec(
        entity_key,
        filter_group=filter_group,
        sort_options=sort_options,
        selected_fields=selected_fields,
        expansions=expansions,
        expansion_level=expansion_level,
        aggregates=aggregates,
        group_by=group_by,
        joins=joins,
        binnings=binnings,
        start=start,
        limit=limit,
    )
    response = await self.request_async(
        spec.method, spec.endpoint, params=spec.params, json=spec.json
    )
    return self._parse_query_response(response, start=start, limit=limit)

def query_entity_records(
    self,
    sql_query: str,
) -> List[Dict[str, Any]]:
    """Internal implementation; see :meth:`EntitiesService.query_entity_records`.

    Legacy escape hatch: runs a client-side-validated SELECT through the
    federated SQL query endpoint (see :meth:`_validate_sql_query`).
    """
    return self._query_entities_for_records(sql_query)

async def query_entity_records_async(
    self,
    sql_query: str,
) -> List[Dict[str, Any]]:
    """Async variant of :meth:`query_entity_records`."""
    return await self._query_entities_for_records_async(sql_query)

def upload_attachment(
    self,
    entity_id: str,
    record_id: str,
    field_name: str,
    file: Optional[FileContent] = None,
    file_path: Optional[str] = None,
    expansion_level: Optional[int] = None,
) -> Dict[str, Any]:
    """Internal implementation; see :meth:`EntitiesService.upload_attachment`.

    Exactly one of ``file`` (raw content) or ``file_path`` must be
    supplied; :meth:`_open_file` enforces this and yields either a real
    file handle (closed on exit) or the raw content via ``nullcontext``.
    """
    spec = self._attachment_endpoint(
        entity_id, record_id, field_name, expansion_level
    )
    with self._open_file(file, file_path) as handle:
        response = self.request(
            "POST",
            spec.endpoint,
            params=spec.params,
            files={"file": handle},
        )
    # The backend may answer 2xx with an empty body; avoid json() on it.
    return response.json() if response.content else {}

async def upload_attachment_async(
    self,
    entity_id: str,
    record_id: str,
    field_name: str,
    file: Optional[FileContent] = None,
    file_path: Optional[str] = None,
    expansion_level: Optional[int] = None,
) -> Dict[str, Any]:
    """Async variant of :meth:`upload_attachment`."""
    spec = self._attachment_endpoint(
        entity_id, record_id, field_name, expansion_level
    )
    with self._open_file(file, file_path) as handle:
        response = await self.request_async(
            "POST",
            spec.endpoint,
            params=spec.params,
            files={"file": handle},
        )
    return response.json() if response.content else {}
expansion_level + ) + with self._open_file(file, file_path) as handle: + response = await self.request_async( + "POST", + spec.endpoint, + params=spec.params, + files={"file": handle}, + ) + return response.json() if response.content else {} + + def download_attachment( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Internal implementation; see :meth:`EntitiesService.download_attachment`.""" + spec = self._attachment_endpoint(entity_id, record_id, field_name) + response = self.request("GET", spec.endpoint) + return response.content + + async def download_attachment_async( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Async variant of :meth:`download_attachment`.""" + spec = self._attachment_endpoint(entity_id, record_id, field_name) + response = await self.request_async("GET", spec.endpoint) + return response.content + + def delete_attachment( + self, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Internal implementation; see :meth:`EntitiesService.delete_attachment`.""" + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + response = self.request("DELETE", spec.endpoint, params=spec.params) + return response.json() if response.content else {} + + async def delete_attachment_async( + self, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Async variant of :meth:`delete_attachment`.""" + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + response = await self.request_async("DELETE", spec.endpoint, params=spec.params) + return response.json() if response.content else {} + + # ------------------------------------------------------------------ + # Bulk import + # ------------------------------------------------------------------ + + def import_records( + self, + entity_id: str, + file: 
Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Internal implementation; see :meth:`EntitiesService.import_records`.""" + with self._open_file(file, file_path) as handle: + response = self.request( + "POST", + Endpoint( + f"datafabric_/api/EntityService/entity/{entity_id}/bulk-upload" + ), + files={"file": handle}, + ) + return EntityImportRecordsResponse.model_validate(response.json() or {}) + + async def import_records_async( + self, + entity_id: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Async variant of :meth:`import_records`.""" + with self._open_file(file, file_path) as handle: + response = await self.request_async( + "POST", + Endpoint( + f"datafabric_/api/EntityService/entity/{entity_id}/bulk-upload" + ), + files={"file": handle}, + ) + return EntityImportRecordsResponse.model_validate(response.json() or {}) + + # ------------------------------------------------------------------ + # Public helper for batch response validation + # ------------------------------------------------------------------ + + def validate_entity_batch( + self, + batch_response: Response, + schema: Optional[Type[Any]] = None, + ) -> EntityRecordsBatchResponse: + """Internal implementation; see :meth:`EntitiesService.validate_entity_batch`.""" + parsed = EntityRecordsBatchResponse.model_validate(batch_response.json()) + + validated_successful_records = [] + for successful_record in parsed.success_records: + data = successful_record.model_dump(by_alias=True) + if data.get("Id") is not None: + validated_successful_records.append( + EntityRecord.from_data(data=data, model=schema) + ) + + return EntityRecordsBatchResponse( + success_records=validated_successful_records, + failure_records=parsed.failure_records, + ) + + # ------------------------------------------------------------------ + # Internal helpers — request specs + # 
------------------------------------------------------------------ + + def _query_entities_for_records(self, sql_query: str) -> List[Dict[str, Any]]: + """Synchronously run a validated SQL query through the federated query engine.""" + self._validate_sql_query(sql_query) + routing_context = self._routing_strategy.resolve() + spec = self._query_entity_records_spec(sql_query, routing_context) + response = self.request(spec.method, spec.endpoint, json=spec.json) + return response.json().get("results", []) + + async def _query_entities_for_records_async( + self, sql_query: str + ) -> List[Dict[str, Any]]: + """Asynchronously run a validated SQL query through the federated query engine.""" + self._validate_sql_query(sql_query) + routing_context = await self._routing_strategy.resolve_async() + spec = self._query_entity_records_spec(sql_query, routing_context) + response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + return response.json().get("results", []) + + @staticmethod + def _list_records_spec( + entity_key: str, + start: Optional[int] = None, + limit: Optional[int] = None, + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> RequestSpec: + """Build the GET spec for the multi-record read endpoint.""" + params: Dict[str, Any] = {} + if start is not None: + params["start"] = start + if limit is not None: + params["limit"] = limit + if expansion_level is not None: + params["expansionLevel"] = expansion_level + if filter is not None: + params["$filter"] = filter + if orderby is not None: + params["$orderby"] = orderby + if select: + params["$select"] = ",".join(select) + if expand: + params["$expand"] = ",".join(expand) + return RequestSpec( + method="GET", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/read" + ), + params=params, + ) + + @staticmethod + def _insert_record_spec( + 
entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + """Build the POST spec for inserting a single record.""" + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/insert" + ), + params=params, + json=EntityDataService._record_to_dict(data), + ) + + @staticmethod + def _get_record_spec( + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + """Build the GET spec for fetching a single record by id.""" + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="GET", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}" + ), + params=params, + ) + + @staticmethod + def _update_record_spec( + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + """Build the POST spec for updating a single record by id.""" + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/update/{record_id}" + ), + params=params, + json=EntityDataService._record_to_dict(data), + ) + + @staticmethod + def _delete_record_spec(entity_key: str, record_id: str) -> RequestSpec: + """Build the DELETE spec for removing a single record by id.""" + return RequestSpec( + method="DELETE", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}" + ), + ) + + @staticmethod + def _insert_batch_spec( + entity_key: str, + records: List[Any], + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> RequestSpec: + """Build the POST spec for the batch-insert 
endpoint.""" + params = EntityDataService._batch_params( + expansion_level=expansion_level, fail_on_first=fail_on_first + ) + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/insert-batch" + ), + params=params, + json=[EntityDataService._record_to_dict(record) for record in records], + ) + + @staticmethod + def _update_batch_spec( + entity_key: str, + records: List[Dict[str, Any]], + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> RequestSpec: + """Build the POST spec for the batch-update endpoint.""" + params = EntityDataService._batch_params( + expansion_level=expansion_level, fail_on_first=fail_on_first + ) + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/update-batch" + ), + params=params, + json=records, + ) + + @staticmethod + def _delete_batch_spec( + entity_key: str, + record_ids: List[str], + fail_on_first: Optional[bool] = None, + ) -> RequestSpec: + """Build the POST spec for the batch-delete endpoint.""" + params = EntityDataService._batch_params(fail_on_first=fail_on_first) + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/delete-batch" + ), + params=params, + json=record_ids, + ) + + @staticmethod + def _batch_params( + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> Dict[str, Any]: + """Build the optional URL params common to all batch endpoints.""" + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + if fail_on_first is not None: + params["failOnFirst"] = "true" if fail_on_first else "false" + return params + + @staticmethod + def _query_spec( + entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + 
@staticmethod
def _query_spec(
    entity_key: str,
    filter_group: Optional[EntityQueryFilterGroup] = None,
    sort_options: Optional[List[EntityQuerySortOption]] = None,
    selected_fields: Optional[List[str]] = None,
    expansions: Optional[List[Any]] = None,
    expansion_level: Optional[int] = None,
    aggregates: Optional[List[Any]] = None,
    group_by: Optional[List[str]] = None,
    joins: Optional[List[EntityJoin]] = None,
    binnings: Optional[List[EntityBinning]] = None,
    start: Optional[int] = None,
    limit: Optional[int] = None,
) -> RequestSpec:
    """Build the request spec for the structured-query endpoint.

    Filters, sorting, projection, expansions, aggregates, group-by, joins,
    binnings, ``start``, and ``limit`` are placed in the JSON body;
    ``expansionLevel`` is a URL query parameter. The V2 endpoint is used
    only when ``binnings`` are supplied.
    """
    body: Dict[str, Any] = {}
    if filter_group is not None:
        body["filterGroup"] = filter_group.model_dump(
            by_alias=True, exclude_none=True
        )
    if sort_options:
        body["sortOptions"] = [
            opt.model_dump(by_alias=True, exclude_none=True) for opt in sort_options
        ]
    if selected_fields:
        body["selectedFields"] = list(selected_fields)
    if expansions:
        # Expansions and aggregates accept either pydantic models or raw
        # dicts; models are serialized, anything else passes through as-is.
        body["expansions"] = [
            e.model_dump(by_alias=True, exclude_none=True)
            if isinstance(e, BaseModel)
            else e
            for e in expansions
        ]
    if aggregates:
        body["aggregates"] = [
            a.model_dump(by_alias=True, exclude_none=True)
            if isinstance(a, BaseModel)
            else a
            for a in aggregates
        ]
    if group_by:
        body["groupBy"] = list(group_by)
    if joins:
        body["joins"] = [
            j.model_dump(by_alias=True, exclude_none=True) for j in joins
        ]
    if binnings:
        body["binnings"] = [
            b.model_dump(by_alias=True, exclude_none=True) for b in binnings
        ]
    if start is not None:
        body["start"] = start
    if limit is not None:
        body["limit"] = limit

    params: Dict[str, Any] = {}
    if expansion_level is not None:
        params["expansionLevel"] = expansion_level

    # Binnings are only understood by the V2 endpoint.
    if binnings:
        endpoint = Endpoint(
            f"datafabric_/api/v2/EntityService/entity/{entity_key}/query"
        )
    else:
        endpoint = Endpoint(
            f"datafabric_/api/EntityService/entity/{entity_key}/query"
        )

    return RequestSpec(
        method="POST",
        endpoint=endpoint,
        params=params,
        json=body,
    )

@staticmethod
def _query_entity_records_spec(
    sql_query: str,
    routing_context: Optional[QueryRoutingOverrideContext] = None,
) -> RequestSpec:
    """Build the POST spec for the federated SQL query endpoint."""
    body: Dict[str, Any] = {"query": sql_query}
    if routing_context:
        body["routingContext"] = routing_context.model_dump(
            by_alias=True, exclude_none=True
        )
    return RequestSpec(
        method="POST",
        endpoint=Endpoint("datafabric_/api/v1/query/execute"),
        json=body,
    )

@staticmethod
def _get_choiceset_values_spec(
    choiceset_id: str,
    start: Optional[int] = None,
    limit: Optional[int] = None,
) -> RequestSpec:
    """Build the POST spec for the choice-set values endpoint.

    Pagination travels as URL params; the body is an empty JSON object.
    """
    params: Dict[str, Any] = {}
    if start is not None:
        params["start"] = start
    if limit is not None:
        params["limit"] = limit
    return RequestSpec(
        method="POST",
        endpoint=Endpoint(
            f"datafabric_/api/EntityService/entity/{choiceset_id}/query_expansion"
        ),
        params=params,
        json={},
    )

@staticmethod
def _attachment_endpoint(
    entity_id: str,
    record_id: str,
    field_name: str,
    expansion_level: Optional[int] = None,
) -> RequestSpec:
    """Return the attachment endpoint plus any ``expansionLevel`` query param.

    The HTTP verb is supplied by the caller; only the URL and query
    parameters depend on these arguments. The ``method="POST"`` stored on
    the spec is advisory — GET/DELETE callers pass their own verb and
    ignore it.
    """
    params: Dict[str, Any] = {}
    if expansion_level is not None:
        params["expansionLevel"] = expansion_level
    return RequestSpec(
        method="POST",
        endpoint=Endpoint(
            f"datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}"
        ),
        params=params,
    )

@staticmethod
def _open_file(file: Optional[FileContent], file_path: Optional[str]) -> Any:
    """Yield a file-like object from raw bytes or a path on disk.

    Exactly one of ``file`` and ``file_path`` must be supplied. Returns a
    context manager: a real binary file handle (closed on ``with`` exit)
    for ``file_path``, or ``nullcontext(file)`` so raw content can be used
    in the same ``with`` statement without being "closed".
    """
    # XOR check: both-None and both-set are equally invalid.
    if (file is None) == (file_path is None):
        raise ValueError(
            "Provide exactly one of `file` (bytes) or `file_path` (str path on disk)."
        )
    if file_path is not None:
        return open(Path(file_path), "rb")
    return nullcontext(file)
+ """ + if (file is None) == (file_path is None): + raise ValueError( + "Provide exactly one of `file` (bytes) or `file_path` (str path on disk)." + ) + if file_path is not None: + return open(Path(file_path), "rb") + return nullcontext(file) + + # ------------------------------------------------------------------ + # Internal helpers — response parsing and record normalisation + # ------------------------------------------------------------------ + + @staticmethod + def _record_to_dict(record: Any) -> Dict[str, Any]: + """Normalize an input record to a plain dict. + + Accepts dicts, Pydantic ``BaseModel`` (including :class:`EntityRecord`), + or any object exposing ``__dict__``. Explicit ``None`` values are + preserved so callers can clear fields by setting them to ``None`` on a + model instance — only unset fields (whose Pydantic default applies) are + dropped via ``exclude_unset=True``. + """ + if isinstance(record, dict): + return dict(record) + if isinstance(record, BaseModel): + return record.model_dump(by_alias=True, exclude_unset=True) + if hasattr(record, "__dict__"): + return {k: v for k, v in record.__dict__.items() if not k.startswith("_")} + raise TypeError( + f"Cannot convert record of type {type(record).__name__} to dict — " + "pass a dict, an EntityRecord, a Pydantic BaseModel, or an object with __dict__." 
+ ) + + @staticmethod + def _build_records_list_response( + response: Response, + schema: Optional[Type[Any]], + start: Optional[int], + limit: Optional[int], + ) -> EntityRecordsListResponse: + """Build an :class:`EntityRecordsListResponse` from a list-records body.""" + body = response.json() or {} + records_data = body.get("value", []) + total_count = int( + body.get("totalRecordCount", body.get("totalCount", len(records_data))) or 0 + ) + records = [ + EntityRecord.from_data(data=record, model=schema) for record in records_data + ] + + next_cursor = body.get("nextCursor") + if limit is not None and limit > 0: + consumed = (start or 0) + len(records) + has_next_page = consumed < total_count + else: + has_next_page = bool(body.get("hasNextPage", False)) + + return EntityRecordsListResponse( + items=records, + total_count=total_count, + has_next_page=has_next_page, + next_cursor=next_cursor, + ) + + @staticmethod + def _parse_query_response( + response: Response, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> EntityQueryRecordsResponse: + """Parse a query response into :class:`EntityQueryRecordsResponse`. + + ``has_next_page`` is derived from ``start + len(items) < total_count`` + whenever ``limit`` is supplied; ``next_cursor`` is populated only when + the backend returns one, otherwise the caller paginates by passing the + next ``start``. + """ + body = response.json() or {} + # Aggregate / binning rows do not carry an ``Id`` field, so fall back + # to constructing the record without strict validation when the row + # cannot be parsed as a regular entity record. 
@staticmethod
def _parse_choiceset_values(response: Response) -> List[ChoiceSetValue]:
    """Decode and return the choice-set values from a query-expansion response.

    The backend embeds the values under ``jsonValue`` either as a JSON
    string (decoded here) or as an already-parsed list.
    """
    data = response.json()
    raw_values = data.get("jsonValue", "[]")
    items = (
        json_module.loads(raw_values) if isinstance(raw_values, str) else raw_values
    )
    return [ChoiceSetValue.model_validate(item) for item in items]

def _request_or_extract_batch(
    self,
    sync_call: Any,
) -> Union[Response, EntityRecordsBatchResponse]:
    """Run a batch request and recover per-record failures from a 400 body.

    On HTTP 400 with a body that lists both ``successRecords`` and
    ``failureRecords``, returns the parsed batch response instead of
    raising. All other errors propagate.
    """
    try:
        return sync_call()
    except EnrichedException as exc:
        extracted = self._extract_batch_response_from_error(exc)
        if extracted is not None:
            return extracted
        # Not the per-record-failure shape: surface the original error.
        raise

async def _request_or_extract_batch_async(
    self,
    async_call: Any,
) -> Union[Response, EntityRecordsBatchResponse]:
    """Async variant of :meth:`_request_or_extract_batch`."""
    try:
        return await async_call()
    except EnrichedException as exc:
        extracted = self._extract_batch_response_from_error(exc)
        if extracted is not None:
            return extracted
        raise
+ """ + cause = exc.__cause__ + if not isinstance(cause, HTTPStatusError): + return None + if cause.response.status_code != 400: + return None + try: + data = cause.response.json() + except Exception: + return None + if not isinstance(data, dict): + return None + if not ( + isinstance(data.get("successRecords"), list) + and isinstance(data.get("failureRecords"), list) + ): + return None + try: + return EntityRecordsBatchResponse.model_validate(data) + except Exception: + return None + + # ------------------------------------------------------------------ + # Internal helpers — SQL validation (federated query path) + # ------------------------------------------------------------------ + + def _validate_sql_query(self, sql_query: str) -> None: + """Validate a SQL string for the federated query endpoint client-side.""" + query = sql_query.strip().rstrip(";").strip() + if not query: + raise ValueError("SQL query cannot be empty.") + + statements = sqlparse.parse(query) + if len(statements) != 1 or not statements[0].tokens: + raise ValueError("Only a single SELECT statement is allowed.") + + stmt = statements[0] + stmt_type = stmt.get_type() + + if stmt_type != "SELECT": + raise ValueError("Only SELECT statements are allowed.") + + keywords = set() + for token in stmt.flatten(): + if token.ttype in Keyword: + keywords.add(token.normalized) + + for kw in _FORBIDDEN_DML: + if kw in keywords: + raise ValueError(f"SQL keyword '{kw}' is not allowed.") + + for kw in _FORBIDDEN_DDL: + if kw in keywords: + raise ValueError(f"SQL keyword '{kw}' is not allowed.") + + for kw in _DISALLOWED_KEYWORDS: + if kw in keywords: + raise ValueError( + f"SQL construct '{kw}' is not allowed in entity queries." 
+ ) + + if self._has_subquery(stmt): + raise ValueError("Subqueries are not allowed.") + + has_where = any(isinstance(t, Where) for t in stmt.tokens) + has_limit = "LIMIT" in keywords + has_from = "FROM" in keywords + + if not has_from: + raise ValueError("Queries must include a FROM clause.") + + projection = self._projection_tokens(stmt) + + if self._projection_has_count_star(projection): + raise ValueError( + "COUNT(*) is not supported. Use COUNT(column_name) instead." + ) + + has_aggregate = self._projection_has_aggregate(projection) + + if not has_where and not has_limit and not has_aggregate: + raise ValueError("Queries without WHERE must include a LIMIT clause.") + + has_bare_wildcard = self._projection_has_bare_wildcard(projection) + if has_bare_wildcard: + raise ValueError("SELECT * is not allowed. Specify column names instead.") + if not has_where and self._projection_column_count(projection) > 4: + raise ValueError( + "Selecting more than 4 columns without filtering is not allowed." 
+ ) + + @staticmethod + def _projection_has_aggregate( + projection: List[sqlparse.sql.Token], + ) -> bool: + """Return ``True`` when the projection contains an aggregate function call.""" + + def _has_agg(token: sqlparse.sql.Token) -> bool: + if isinstance(token, Function): + return token.get_name().upper() in _AGGREGATE_FUNCTIONS + if isinstance(token, Identifier): + return any(_has_agg(child) for child in token.tokens) + return False + + for node in projection: + if _has_agg(node): + return True + if isinstance(node, IdentifierList): + if any(_has_agg(child) for child in node.tokens): + return True + return False + + @staticmethod + def _projection_has_count_star( + projection: List[sqlparse.sql.Token], + ) -> bool: + """Return ``True`` when the projection contains ``COUNT(*)``.""" + + def _is_count_star(func: Function) -> bool: + if func.get_name().upper() != "COUNT": + return False + return any(t.ttype is Wildcard for t in func.flatten()) + + def _has_count_star(token: sqlparse.sql.Token) -> bool: + if isinstance(token, Function): + return _is_count_star(token) + if isinstance(token, Identifier): + return any(_has_count_star(child) for child in token.tokens) + return False + + for node in projection: + if _has_count_star(node): + return True + if isinstance(node, IdentifierList): + if any(_has_count_star(child) for child in node.tokens): + return True + return False + + @staticmethod + def _projection_has_bare_wildcard( + projection: List[sqlparse.sql.Token], + ) -> bool: + """Return ``True`` for a bare ``*`` or qualified ``table.*`` outside a function.""" + + def _identifier_has_wildcard(ident: Identifier) -> bool: + return any(t.ttype is Wildcard for t in ident.tokens) + + for node in projection: + if node.ttype is Wildcard: + return True + if isinstance(node, Identifier) and _identifier_has_wildcard(node): + return True + if isinstance(node, IdentifierList): + for child in node.tokens: + if child.ttype is Wildcard: + return True + if isinstance(child, 
Identifier) and _identifier_has_wildcard( + child + ): + return True + return False + + @staticmethod + def _has_subquery(stmt: sqlparse.sql.Statement) -> bool: + """Recursively walk the AST looking for a SELECT inside parentheses.""" + + def _walk(token: sqlparse.sql.Token) -> bool: + if isinstance(token, Parenthesis): + for child in token.flatten(): + if child.ttype is DML and child.normalized == "SELECT": + return True + if hasattr(token, "tokens"): + for child in token.tokens: + if _walk(child): + return True + return False + + for token in stmt.tokens: + if _walk(token): + return True + return False + + @staticmethod + def _projection_tokens( + stmt: sqlparse.sql.Statement, + ) -> List[sqlparse.sql.Token]: + """Return the non-flattened AST nodes between the first SELECT and FROM.""" + tokens: List[sqlparse.sql.Token] = [] + collecting = False + for token in stmt.tokens: + if token.ttype is DML and token.normalized == "SELECT": + collecting = True + continue + if token.ttype is Keyword and token.normalized in ("FROM", "INTO"): + break + if token.ttype is Keyword and token.normalized == "DISTINCT": + continue + if collecting and token.ttype is not Whitespace: + tokens.append(token) + return tokens + + @staticmethod + def _projection_column_count( + projection: List[sqlparse.sql.Token], + ) -> int: + """Return the number of columns referenced by the projection.""" + for node in projection: + if isinstance(node, IdentifierList): + return len(list(node.get_identifiers())) + if isinstance(node, (Identifier, Function)): + return 1 + if node.ttype is Wildcard: + return 1 + return 0 diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py new file mode 100644 index 000000000..8b79b8e4b --- /dev/null +++ b/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py @@ -0,0 +1,499 @@ +"""Schema-side operations for the Data Fabric 
entities surface. + +Handles entity definitions, choice set listings, and the create / delete / +update-metadata lifecycle that targets the backend ``EntityController``. +Record CRUD, queries, attachments, and bulk import live on +:class:`EntityDataService` and are mediated by :class:`EntitiesService`. +""" + +import re +from typing import Any, Dict, List, Optional, Union + +from httpx import Response + +from ..common._base_service import BaseService +from ..common._config import UiPathApiConfig +from ..common._execution_context import UiPathExecutionContext +from ..common._models import Endpoint, RequestSpec +from ..common.constants import HEADER_FOLDER_KEY +from ..orchestrator._folder_service import FolderService +from .entities import ( + ENTITY_FIELD_CONSTRAINT_DEFAULTS, + ENTITY_FIELD_CONSTRAINT_SPEC, + ENTITY_SCHEMA_FIELD_TYPE_MAP, + RESERVED_FIELD_NAMES, + Entity, + EntityCreateFieldOptions, + EntityCreateOptions, + EntityFieldDataType, + EntityMetadataUpdateOptions, +) + +DATA_FABRIC_TENANT_FOLDER_ID = "00000000-0000-0000-0000-000000000000" + +_NAME_RE = re.compile(r"^[a-zA-Z][a-zA-Z0-9]*$") +"""Entity and field name pattern: must start with a letter, then letters and digits only. + +Matches the UI's create-entity / create-field form validators so any name accepted +here can later be displayed or edited through the Data Service UI. +""" + +_ENTITY_NAME_MIN_LENGTH = 1 +_ENTITY_NAME_MAX_LENGTH = 30 +_FIELD_NAME_MIN_LENGTH = 3 +_FIELD_NAME_MAX_LENGTH = 100 + + +class EntitySchemaService(BaseService): + """HTTP service for entity-schema operations. + + Provides retrieval and lifecycle management for entities and choice sets. + Backend target: ``datafabric_/api/Entity``. + + See Also: + https://docs.uipath.com/data-service/automation-cloud/latest/user-guide/introduction + + !!! warning "Preview Feature" + This service is currently experimental. Behavior and parameters are + subject to change in future versions. 
+ """ + + def __init__( + self, + config: UiPathApiConfig, + execution_context: UiPathExecutionContext, + folders_service: Optional[FolderService] = None, + ) -> None: + """Initialise the schema service.""" + super().__init__(config=config, execution_context=execution_context) + self._folders_service = folders_service + + def retrieve(self, entity_key: str) -> Entity: + """Internal implementation; see :meth:`EntitiesService.retrieve`.""" + spec = self._retrieve_spec(entity_key) + response = self.request(spec.method, spec.endpoint) + return Entity.model_validate(response.json()) + + async def retrieve_async(self, entity_key: str) -> Entity: + """Async variant of :meth:`retrieve`.""" + spec = self._retrieve_spec(entity_key) + response = await self.request_async(spec.method, spec.endpoint) + return Entity.model_validate(response.json()) + + def retrieve_by_name( + self, entity_name: str, folder_key: Optional[str] = None + ) -> Entity: + """Internal implementation; see :meth:`EntitiesService.retrieve_by_name`.""" + spec = self._retrieve_by_name_spec(entity_name) + headers = self._folder_key_headers(folder_key) + response = self.request(spec.method, spec.endpoint, headers=headers) + return Entity.model_validate(response.json()) + + async def retrieve_by_name_async( + self, entity_name: str, folder_key: Optional[str] = None + ) -> Entity: + """Async variant of :meth:`retrieve_by_name`.""" + spec = self._retrieve_by_name_spec(entity_name) + headers = self._folder_key_headers(folder_key) + response = await self.request_async(spec.method, spec.endpoint, headers=headers) + return Entity.model_validate(response.json()) + + def list_entities(self) -> List[Entity]: + """Internal implementation; see :meth:`EntitiesService.list_entities`.""" + spec = self._list_entities_spec() + response = self.request(spec.method, spec.endpoint) + entities_data = response.json() + return [Entity.model_validate(entity) for entity in entities_data] + + async def list_entities_async(self) -> 
List[Entity]: + """Async variant of :meth:`list_entities`.""" + spec = self._list_entities_spec() + response = await self.request_async(spec.method, spec.endpoint) + entities_data = response.json() + return [Entity.model_validate(entity) for entity in entities_data] + + def list_choicesets(self) -> List[Entity]: + """Internal implementation; see :meth:`EntitiesService.list_choicesets`.""" + spec = self._list_choicesets_spec() + response = self.request(spec.method, spec.endpoint) + return [Entity.model_validate(item) for item in response.json()] + + async def list_choicesets_async(self) -> List[Entity]: + """Async variant of :meth:`list_choicesets`.""" + spec = self._list_choicesets_spec() + response = await self.request_async(spec.method, spec.endpoint) + return [Entity.model_validate(item) for item in response.json()] + + def create_entity( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Internal implementation; see :meth:`EntitiesService.create_entity`.""" + spec = self._create_entity_spec(name, fields, options) + response = self.request(spec.method, spec.endpoint, json=spec.json) + return self._extract_entity_id(response) + + async def create_entity_async( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Async variant of :meth:`create_entity`.""" + spec = self._create_entity_spec(name, fields, options) + response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + return self._extract_entity_id(response) + + def delete_entity(self, entity_id: str) -> None: + """Delete an entity and all of its records.""" + spec = self._delete_entity_spec(entity_id) + self.request(spec.method, spec.endpoint) + + async def delete_entity_async(self, entity_id: str) -> None: + """Async variant of :meth:`delete_entity`.""" + spec = self._delete_entity_spec(entity_id) + await 
self.request_async(spec.method, spec.endpoint) + + def update_entity_metadata( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Internal implementation; see :meth:`EntitiesService.update_entity_metadata`.""" + spec = self._update_entity_metadata_spec(entity_id, metadata) + self.request(spec.method, spec.endpoint, json=spec.json) + + async def update_entity_metadata_async( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Async variant of :meth:`update_entity_metadata`.""" + spec = self._update_entity_metadata_spec(entity_id, metadata) + await self.request_async(spec.method, spec.endpoint, json=spec.json) + + # ------------------------------------------------------------------ + # Request-spec builders + # ------------------------------------------------------------------ + + @staticmethod + def _retrieve_spec(entity_key: str) -> RequestSpec: + """Build the GET spec for fetching an entity by key.""" + return RequestSpec( + method="GET", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_key}"), + ) + + @staticmethod + def _retrieve_by_name_spec(entity_name: str) -> RequestSpec: + """Build the GET spec for fetching an entity by name.""" + return RequestSpec( + method="GET", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_name}/metadata"), + ) + + @staticmethod + def _folder_key_headers(folder_key: Optional[str]) -> Dict[str, str]: + """Return the folder-key header dict, empty when no key is supplied.""" + if folder_key: + return {HEADER_FOLDER_KEY: folder_key} + return {} + + @staticmethod + def _list_entities_spec() -> RequestSpec: + """Build the GET spec for listing all entities (non-choice-sets).""" + return RequestSpec( + method="GET", + endpoint=Endpoint("datafabric_/api/Entity"), + ) + + @staticmethod + def _list_choicesets_spec() -> RequestSpec: + """Build the GET spec for listing all choice sets.""" + return RequestSpec( + method="GET", 
+ endpoint=Endpoint("datafabric_/api/Entity/choiceset"), + ) + + @classmethod + def _create_entity_spec( + cls, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> RequestSpec: + """Build the POST spec for creating an entity with its field schema.""" + cls._validate_name(name, "entity") + for field in fields: + cls._validate_name(field.field_name, "field") + opts = options or EntityCreateOptions() + # The user-facing option ``is_analytics_enabled`` maps to the legacy + # backend field name ``isInsightsEnabled`` — the wire name predates + # the "Analytics" UI rename. + payload: Dict[str, Any] = { + "displayName": opts.display_name or name, + "entityDefinition": { + "name": name, + "fields": [cls._build_schema_field_payload(f) for f in fields], + "folderId": opts.folder_key or DATA_FABRIC_TENANT_FOLDER_ID, + "isRbacEnabled": bool(opts.is_rbac_enabled or False), + "isInsightsEnabled": bool(opts.is_analytics_enabled or False), + "externalFields": opts.external_fields or [], + }, + } + if opts.description is not None: + payload["description"] = opts.description + return RequestSpec( + method="POST", + endpoint=Endpoint("datafabric_/api/Entity"), + json=payload, + ) + + @staticmethod + def _delete_entity_spec(entity_id: str) -> RequestSpec: + """Build the DELETE spec for removing an entity.""" + return RequestSpec( + method="DELETE", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_id}"), + ) + + @staticmethod + def _update_entity_metadata_spec( + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> RequestSpec: + """Build the PATCH spec for updating entity metadata. + + Dict inputs are validated through :class:`EntityMetadataUpdateOptions` + so snake_case keys (``display_name``) and camelCase keys + (``displayName``) both serialise to the API field names the backend + expects. 
+ """ + if not isinstance(metadata, EntityMetadataUpdateOptions): + metadata = EntityMetadataUpdateOptions.model_validate(metadata) + body = metadata.model_dump(by_alias=True, exclude_none=True) + return RequestSpec( + method="PATCH", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_id}/metadata"), + json=body, + ) + + @classmethod + def _build_schema_field_payload( + cls, field: EntityCreateFieldOptions + ) -> Dict[str, Any]: + """Build the API field payload for a single field on create-entity. + + Maps :class:`EntityFieldDataType` to the backend's ``sqlType.name`` and + ``fieldDisplayType`` (e.g. ``STRING`` becomes ``NVARCHAR`` / ``Basic``). + Caller-supplied constraints are validated against + :data:`ENTITY_FIELD_CONSTRAINT_SPEC`; unsupplied per-type constraints + fall back to :data:`ENTITY_FIELD_CONSTRAINT_DEFAULTS` so the field is + persisted fully and remains editable later. + """ + ftype = field.type or EntityFieldDataType.STRING + cls._validate_name(field.field_name, "field") + cls._validate_field_constraints(ftype, field) + + sql_type_name, field_display_type = ENTITY_SCHEMA_FIELD_TYPE_MAP[ftype] + sql_type: Dict[str, Any] = {"name": sql_type_name} + sql_type.update(cls._build_sql_type_constraints(ftype, field)) + + payload: Dict[str, Any] = { + "name": field.field_name, + "displayName": field.display_name or field.field_name, + "sqlType": sql_type, + "fieldDisplayType": field_display_type, + "description": field.description or "", + "isRequired": bool(field.is_required or False), + "isUnique": bool(field.is_unique or False), + "isRbacEnabled": bool(field.is_rbac_enabled or False), + "isEncrypted": bool(field.is_encrypted or False), + } + if field.default_value is not None: + payload["defaultValue"] = field.default_value + if field.choice_set_id is not None: + payload["choiceSetId"] = field.choice_set_id + if field.reference_entity_name is not None: + payload["referenceEntityName"] = field.reference_entity_name + if field.reference_field_name is not 
None: + payload["referenceFieldName"] = field.reference_field_name + return payload + + @staticmethod + def _build_sql_type_constraints( + ftype: EntityFieldDataType, field: EntityCreateFieldOptions + ) -> Dict[str, Any]: + """Return the ``sqlType`` constraint fields required for ``ftype``. + + Caller-supplied values override defaults where the type accepts them; + types that take no constraints (UUID, DATETIME, CHOICE_SET_SINGLE, + AUTO_NUMBER) return an empty dict. + """ + d = ENTITY_FIELD_CONSTRAINT_DEFAULTS + if ftype is EntityFieldDataType.STRING: + return {"lengthLimit": field.length_limit or d["STRING_LENGTH_LIMIT"]} + if ftype is EntityFieldDataType.MULTILINE_TEXT: + return { + "lengthLimit": field.length_limit or d["MULTILINE_TEXT_LENGTH_LIMIT"] + } + if ftype is EntityFieldDataType.DECIMAL: + return { + "lengthLimit": d["DECIMAL_LENGTH_LIMIT"], + "decimalPrecision": ( + field.decimal_precision + if field.decimal_precision is not None + else d["DECIMAL_PRECISION"] + ), + "maxValue": ( + field.max_value + if field.max_value is not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype is EntityFieldDataType.BOOLEAN: + return {"lengthLimit": d["BOOLEAN_LENGTH_LIMIT"]} + if ftype in ( + EntityFieldDataType.DATE, + EntityFieldDataType.DATETIME_WITH_TZ, + ): + return {"lengthLimit": d["DATE_LENGTH_LIMIT"]} + if ftype in (EntityFieldDataType.INTEGER, EntityFieldDataType.BIG_INTEGER): + return { + "maxValue": ( + field.max_value + if field.max_value is not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype in (EntityFieldDataType.FLOAT, EntityFieldDataType.DOUBLE): + return { + "decimalPrecision": ( + field.decimal_precision + if field.decimal_precision is not None + else d["DECIMAL_PRECISION"] + ), + "maxValue": ( + field.max_value + if field.max_value is 
 not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype in (EntityFieldDataType.FILE, EntityFieldDataType.RELATIONSHIP): + return {"lengthLimit": d["UNIQUEIDENTIFIER_LENGTH_LIMIT"]} + if ftype is EntityFieldDataType.CHOICE_SET_MULTIPLE: + return {"lengthLimit": d["CHOICE_SET_MULTIPLE_LENGTH_LIMIT"]} + # UUID, DATETIME, CHOICE_SET_SINGLE, AUTO_NUMBER — no constraints + return {} + + @staticmethod + def _validate_name(name: str, context: str) -> None: + r"""Validate an entity or field name against the UI's create-form rules. + + Entity names must be 1-30 characters; field names must be 3-100 + characters. Both must match ``^[a-zA-Z][a-zA-Z0-9]*$`` — start with a + letter, then letters or digits only (underscores are not permitted, to + stay consistent with the UI's entity / field creation forms). + + Field names additionally cannot collide with the system-reserved field + names in :data:`RESERVED_FIELD_NAMES`; the reserved-name check runs + first so that short reserved names produce a more informative error. + """ + if context == "field": + if name in RESERVED_FIELD_NAMES: + reserved = ", ".join(sorted(RESERVED_FIELD_NAMES)) + raise ValueError( + f"Field name {name!r} is reserved. Reserved names: {reserved}." + ) + min_len, max_len = _FIELD_NAME_MIN_LENGTH, _FIELD_NAME_MAX_LENGTH + else: + min_len, max_len = _ENTITY_NAME_MIN_LENGTH, _ENTITY_NAME_MAX_LENGTH + + if not (min_len <= len(name) <= max_len) or not _NAME_RE.match(name): + raise ValueError( + f"Invalid {context} name {name!r}. Must start with a letter, " + f"contain only letters and digits, and be {min_len}-{max_len} " + "characters." + ) + + @staticmethod + def _validate_field_constraints( + ftype: EntityFieldDataType, field: EntityCreateFieldOptions + ) -> None: + """Validate caller-supplied per-field constraints. + + Rejects constraints that ``ftype`` does not accept (e.g. 
+ ``decimal_precision`` on ``STRING``), values outside the inclusive + range declared in :data:`ENTITY_FIELD_CONSTRAINT_SPEC`, and + ``min_value`` greater than or equal to ``max_value`` when both are + supplied. + """ + spec = ENTITY_FIELD_CONSTRAINT_SPEC.get(ftype, {}) + provided: Dict[str, Any] = {} + for attr in ("length_limit", "max_value", "min_value", "decimal_precision"): + value = getattr(field, attr) + if value is not None: + provided[attr] = value + + unsupported = [name for name in provided if name not in spec] + if unsupported: + allowed = ", ".join(sorted(spec.keys())) or "none" + raise ValueError( + f"Field {field.field_name!r} of type {ftype.value} does not accept " + f"{', '.join(sorted(unsupported))}. Allowed constraints: {allowed}." + ) + + for name, value in provided.items(): + low, high = spec[name] + if not (low <= value <= high): + raise ValueError( + f"Field {field.field_name!r} of type {ftype.value}: " + f"{name}={value} is out of range [{low}, {high}]." + ) + + if ( + field.min_value is not None + and field.max_value is not None + and field.min_value >= field.max_value + ): + raise ValueError( + f"Field {field.field_name!r}: min_value ({field.min_value}) must be " + f"strictly less than max_value ({field.max_value})." + ) + + @staticmethod + def _extract_entity_id(response: Response) -> str: + """Return the new entity id from a create-entity response. + + Accepts both a bare JSON string id and a JSON object containing + ``id`` or ``entityId``. 
+ """ + try: + body = response.json() + except Exception: + return response.text.strip().strip('"') + if isinstance(body, str): + return body + if isinstance(body, dict): + for key in ("id", "Id", "entityId", "EntityId"): + value = body.get(key) + if isinstance(value, str): + return value + return response.text.strip().strip('"') diff --git a/packages/uipath-platform/src/uipath/platform/entities/entities.py b/packages/uipath-platform/src/uipath/platform/entities/entities.py index 48c8dce07..c20d95b74 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/entities.py +++ b/packages/uipath-platform/src/uipath/platform/entities/entities.py @@ -2,18 +2,20 @@ from __future__ import annotations -from enum import Enum +from enum import Enum, IntEnum from types import EllipsisType from typing import ( TYPE_CHECKING, Any, Dict, + Iterator, List, Optional, Type, Union, get_args, get_origin, + overload, ) from pydantic import AliasChoices, BaseModel, ConfigDict, Field, create_model @@ -82,8 +84,8 @@ class ExternalConnection(BaseModel): id: str connection_id: str = Field(alias="connectionId") element_instance_id: str = Field(alias="elementInstanceId") - folder_id: str = Field(alias="folderKey") # named folderKey in TS SDK - connector_id: str = Field(alias="connectorKey") # named connectorKey in TS SDK + folder_id: str = Field(alias="folderKey") + connector_id: str = Field(alias="connectorKey") connector_name: str = Field(alias="connectorName") connection_name: str = Field(alias="connectionName") @@ -257,7 +259,7 @@ class EntityRecord(BaseModel): "extra": "allow", } - id: str = Field(alias="Id") # Mandatory field validated by Pydantic + id: str = Field(alias="Id") @classmethod def from_data( @@ -356,6 +358,25 @@ class Entity(BaseModel): id: str +class FailureRecord(BaseModel): + """A record that failed to insert/update/delete in a batch operation. 
+ + Backend error responses for failed records do not always include a valid + ``Id`` field — this model accepts arbitrary shapes so the caller can + inspect ``error`` text and the original ``record`` payload. + """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + extra="allow", + ) + + id: Optional[str] = Field(default=None, alias="Id") + error: Optional[str] = Field(default=None) + record: Optional[Dict[str, Any]] = Field(default=None) + + class EntityRecordsBatchResponse(BaseModel): """Model representing a batch response of entity records.""" @@ -364,8 +385,360 @@ class EntityRecordsBatchResponse(BaseModel): validate_by_alias=True, ) - success_records: List[EntityRecord] = Field(alias="successRecords") - failure_records: List[EntityRecord] = Field(alias="failureRecords") + success_records: List[EntityRecord] = Field( + default_factory=list, alias="successRecords" + ) + failure_records: List[FailureRecord] = Field( + default_factory=list, alias="failureRecords" + ) + + +class EntityRecordsListResponse(List[EntityRecord]): + """List of EntityRecord with pagination metadata. + + Subclasses ``list`` so existing call sites that iterate, index, or call + ``len()`` continue to work; new fields ``total_count``, ``has_next_page``, + and ``next_cursor`` expose pagination information returned by the backend. 
+ """ + + def __init__( + self, + items: Optional[List[EntityRecord]] = None, + total_count: int = 0, + has_next_page: bool = False, + next_cursor: Optional[str] = None, + ) -> None: + """Construct from a list of records plus pagination metadata.""" + super().__init__(items or []) + self.total_count = total_count + self.has_next_page = has_next_page + self.next_cursor = next_cursor + + +class LogicalOperator(IntEnum): + """Logical operator for combining query filter groups.""" + + And = 0 + Or = 1 + + +class QueryFilterOperator(str, Enum): + """Comparison operators supported by the structured query API.""" + + Equals = "=" + NotEquals = "!=" + GreaterThan = ">" + LessThan = "<" + GreaterThanOrEqual = ">=" + LessThanOrEqual = "<=" + Contains = "contains" + NotContains = "not contains" + StartsWith = "startswith" + EndsWith = "endswith" + In = "in" + NotIn = "not in" + + +class EntityQueryFilter(BaseModel): + """A single filter condition for querying entity records.""" + + model_config = ConfigDict(validate_by_name=True, validate_by_alias=True) + + field_name: str = Field(alias="fieldName") + operator: QueryFilterOperator + value: Optional[str] = None + value_list: Optional[List[str]] = Field(default=None, alias="valueList") + + +class EntityQueryFilterGroup(BaseModel): + """A group of query filters combined with a logical operator.""" + + model_config = ConfigDict(validate_by_name=True, validate_by_alias=True) + + logical_operator: Optional[LogicalOperator] = Field( + default=None, alias="logicalOperator" + ) + continue_logical_operator: Optional[LogicalOperator] = Field( + default=None, alias="continueLogicalOperator" + ) + query_filters: Optional[List[EntityQueryFilter]] = Field( + default=None, alias="queryFilters" + ) + filter_groups: Optional[List["EntityQueryFilterGroup"]] = Field( + default=None, alias="filterGroups" + ) + + +class EntityQuerySortOption(BaseModel): + """Sort option for query results.""" + + model_config = ConfigDict(validate_by_name=True, 
validate_by_alias=True) + + field_name: str = Field(alias="fieldName") + is_descending: Optional[bool] = Field(default=None, alias="isDescending") + + +class EntityAggregateFunction(str, Enum): + """Aggregate functions supported by the Data Fabric query API.""" + + Count = "COUNT" + Sum = "SUM" + Avg = "AVG" + Min = "MIN" + Max = "MAX" + + +class EntityAggregate(BaseModel): + """A single aggregate expression to apply during a query.""" + + model_config = ConfigDict(validate_by_name=True, validate_by_alias=True) + + function: EntityAggregateFunction + field: str + alias: Optional[str] = None + + +class EntityJoin(BaseModel): + """Multi-entity JOIN definition for cross-entity queries.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + entity_name: Optional[str] = Field(default=None, alias="entityName") + join_type: Optional[str] = Field(default=None, alias="joinType") + join_field_name: Optional[str] = Field(default=None, alias="joinFieldName") + related_entity_name: Optional[str] = Field(default=None, alias="relatedEntityName") + related_field_name: Optional[str] = Field(default=None, alias="relatedFieldName") + + +class EntityBinning(BaseModel): + """A binning (GROUP BY/aggregation) clause for V2 query endpoint.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + field_name: Optional[str] = Field(default=None, alias="fieldName") + aggregate_function: Optional[EntityAggregateFunction] = Field( + default=None, alias="aggregateFunction" + ) + alias: Optional[str] = None + + +class EntityQueryRecordsResponse(BaseModel): + """Response from querying entity records.""" + + model_config = ConfigDict(validate_by_name=True, validate_by_alias=True) + + items: List[EntityRecord] = Field(default_factory=list) + total_count: int = Field(default=0, alias="totalCount") + has_next_page: bool = Field(default=False, alias="hasNextPage") + next_cursor: Optional[str] = 
Field(default=None, alias="nextCursor") + + def __iter__(self) -> Iterator[EntityRecord]: # type: ignore[override] + """Iterate over records (delegates to ``self.items``).""" + return iter(self.items) + + def __len__(self) -> int: + """Return the number of records (delegates to ``self.items``).""" + return len(self.items) + + @overload + def __getitem__(self, index: int) -> EntityRecord: ... + + @overload + def __getitem__(self, index: slice) -> List[EntityRecord]: ... + + def __getitem__( + self, index: Union[int, slice] + ) -> Union[EntityRecord, List[EntityRecord]]: + """Index or slice records (delegates to ``self.items``).""" + return self.items[index] + + +class EntityFieldDataType(str, Enum): + """User-facing entity field data type names accepted by ``create_entity``.""" + + UUID = "UUID" + STRING = "STRING" + INTEGER = "INTEGER" + DATETIME = "DATETIME" + DATETIME_WITH_TZ = "DATETIME_WITH_TZ" + DECIMAL = "DECIMAL" + FLOAT = "FLOAT" + DOUBLE = "DOUBLE" + DATE = "DATE" + BOOLEAN = "BOOLEAN" + BIG_INTEGER = "BIG_INTEGER" + MULTILINE_TEXT = "MULTILINE_TEXT" + FILE = "FILE" + CHOICE_SET_SINGLE = "CHOICE_SET_SINGLE" + CHOICE_SET_MULTIPLE = "CHOICE_SET_MULTIPLE" + AUTO_NUMBER = "AUTO_NUMBER" + RELATIONSHIP = "RELATIONSHIP" + + +# Maps the user-facing EntityFieldDataType to the ``(sqlType.name, fieldDisplayType)`` +# tuple expected by the backend when creating an entity. ``sqlType.name`` is +# the raw SQL Server type the backend persists; ``fieldDisplayType`` controls +# how the field renders in the UI. 
+ENTITY_SCHEMA_FIELD_TYPE_MAP: Dict[EntityFieldDataType, "tuple[str, str]"] = { + EntityFieldDataType.UUID: ("UNIQUEIDENTIFIER", "Basic"), + EntityFieldDataType.STRING: ("NVARCHAR", "Basic"), + EntityFieldDataType.INTEGER: ("INT", "Basic"), + EntityFieldDataType.DATETIME: ("DATETIME2", "Basic"), + EntityFieldDataType.DATETIME_WITH_TZ: ("DATETIMEOFFSET", "Basic"), + EntityFieldDataType.DECIMAL: ("DECIMAL", "Basic"), + EntityFieldDataType.FLOAT: ("FLOAT", "Basic"), + EntityFieldDataType.DOUBLE: ("REAL", "Basic"), + EntityFieldDataType.DATE: ("DATE", "Basic"), + EntityFieldDataType.BOOLEAN: ("BIT", "Basic"), + EntityFieldDataType.BIG_INTEGER: ("BIGINT", "Basic"), + EntityFieldDataType.MULTILINE_TEXT: ("MULTILINE", "Basic"), + EntityFieldDataType.FILE: ("UNIQUEIDENTIFIER", "File"), + EntityFieldDataType.CHOICE_SET_SINGLE: ("INT", "ChoiceSetSingle"), + EntityFieldDataType.CHOICE_SET_MULTIPLE: ("NVARCHAR", "ChoiceSetMultiple"), + EntityFieldDataType.AUTO_NUMBER: ("DECIMAL", "AutoNumber"), + EntityFieldDataType.RELATIONSHIP: ("UNIQUEIDENTIFIER", "Relationship"), +} + +# Default and fixed sqlType constraint values applied when the caller does +# not supply them. The backend requires these on field creation — without +# them the field is stored in an incomplete state and the UI later fails +# with "Field type cannot be changed" when editing advanced options. +ENTITY_FIELD_CONSTRAINT_DEFAULTS: Dict[str, int] = { + "STRING_LENGTH_LIMIT": 200, + "MULTILINE_TEXT_LENGTH_LIMIT": 200, + "DECIMAL_LENGTH_LIMIT": 1000, + "DECIMAL_PRECISION": 2, + "BOOLEAN_LENGTH_LIMIT": 100, + "DATE_LENGTH_LIMIT": 1000, + "UNIQUEIDENTIFIER_LENGTH_LIMIT": 300, + "CHOICE_SET_MULTIPLE_LENGTH_LIMIT": 4000, + "NUMERIC_MAX_VALUE": 1_000_000_000_000, + "NUMERIC_MIN_VALUE": -1_000_000_000_000, +} + +# Per-field-type spec describing which user-supplied constraints are valid +# and their inclusive ranges. 
Field types absent from this map (BOOLEAN, +# DATE, DATETIME, DATETIME_WITH_TZ, FILE, RELATIONSHIP, UUID, CHOICE_SET_*, +# AUTO_NUMBER) accept no user-supplied constraints — passing one raises +# ``ValueError`` so the caller gets a clear local error before any HTTP call. +_MAX_SAFE_INTEGER = 9_007_199_254_740_991 + +ENTITY_FIELD_CONSTRAINT_SPEC: Dict[ + EntityFieldDataType, Dict[str, "tuple[float, float]"] +] = { + EntityFieldDataType.STRING: { + "length_limit": (1, 4000), + }, + EntityFieldDataType.MULTILINE_TEXT: { + "length_limit": (1, 10000), + }, + EntityFieldDataType.INTEGER: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + }, + EntityFieldDataType.BIG_INTEGER: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + }, + EntityFieldDataType.DECIMAL: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, + EntityFieldDataType.FLOAT: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, + EntityFieldDataType.DOUBLE: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, +} + +RESERVED_FIELD_NAMES = frozenset( + ["Id", "CreatedBy", "CreateTime", "UpdatedBy", "UpdateTime"] +) +"""Field names reserved by the backend — using one as a user field name is rejected.""" + + +class EntityCreateFieldOptions(BaseModel): + """User-facing field definition for creating or updating entity schemas.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + field_name: str = Field(alias="fieldName") + type: Optional[EntityFieldDataType] = Field( + default=EntityFieldDataType.STRING, alias="type" + ) + display_name: 
Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = None + is_required: Optional[bool] = Field(default=None, alias="isRequired") + is_unique: Optional[bool] = Field(default=None, alias="isUnique") + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + is_encrypted: Optional[bool] = Field(default=None, alias="isEncrypted") + default_value: Optional[str] = Field(default=None, alias="defaultValue") + length_limit: Optional[int] = Field(default=None, alias="lengthLimit") + max_value: Optional[float] = Field(default=None, alias="maxValue") + min_value: Optional[float] = Field(default=None, alias="minValue") + decimal_precision: Optional[int] = Field(default=None, alias="decimalPrecision") + choice_set_id: Optional[str] = Field(default=None, alias="choiceSetId") + reference_entity_name: Optional[str] = Field( + default=None, alias="referenceEntityName" + ) + reference_field_name: Optional[str] = Field( + default=None, alias="referenceFieldName" + ) + + +class EntityCreateOptions(BaseModel): + """Options for creating a new Data Fabric entity.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + display_name: Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = None + folder_key: Optional[str] = Field(default=None, alias="folderKey") + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + is_analytics_enabled: Optional[bool] = Field( + default=None, alias="isAnalyticsEnabled" + ) + external_fields: Optional[List[Dict[str, Any]]] = Field( + default=None, alias="externalFields" + ) + + +class EntityMetadataUpdateOptions(BaseModel): + """Options for updating an entity's metadata via PATCH /metadata.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + display_name: Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = 
None + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + + +class EntityImportRecordsResponse(BaseModel): + """Response from a bulk import operation.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + total_records: int = Field(default=0, alias="totalRecords") + inserted_records: int = Field(default=0, alias="insertedRecords") + error_file_link: Optional[str] = Field(default=None, alias="errorFileLink") class EntityRouting(BaseModel): @@ -412,3 +785,4 @@ class EntitySetResolution(BaseModel): Entity.model_rebuild() +EntityQueryFilterGroup.model_rebuild() diff --git a/packages/uipath-platform/tests/services/test_entities_service.py b/packages/uipath-platform/tests/services/test_entities_service.py index 29ce6fb79..2938ae01c 100644 --- a/packages/uipath-platform/tests/services/test_entities_service.py +++ b/packages/uipath-platform/tests/services/test_entities_service.py @@ -15,6 +15,7 @@ ) from uipath.platform.entities import ChoiceSetValue, DataFabricEntityItem, Entity from uipath.platform.entities._entities_service import EntitiesService +from uipath.platform.entities._entity_data_service import EntityDataService @pytest.fixture @@ -325,7 +326,7 @@ def test_retrieve_records_without_start_and_limit( def test_validate_sql_query_allows_supported_select_queries( self, sql_query: str, service: EntitiesService ) -> None: - service._validate_sql_query(sql_query) + service._data._validate_sql_query(sql_query) @pytest.mark.parametrize( "sql_query,error_message", @@ -415,20 +416,20 @@ def test_validate_sql_query_rejects_disallowed_queries( self, sql_query: str, error_message: str, service: EntitiesService ) -> None: with pytest.raises(ValueError, match=re.escape(error_message)): - service._validate_sql_query(sql_query) + service._data._validate_sql_query(sql_query) def test_query_entity_records_rejects_invalid_sql_before_network_call( self, service: EntitiesService, ) -> None: - 
service.request = MagicMock() # type: ignore[method-assign] + service._data.request = MagicMock() # type: ignore[method-assign] with pytest.raises( ValueError, match=re.escape("Only SELECT statements are allowed.") ): service.query_entity_records("UPDATE Customers SET name = 'X'") - service.request.assert_not_called() + service._data.request.assert_not_called() def test_query_entity_records_calls_request_for_valid_sql( self, @@ -437,26 +438,26 @@ def test_query_entity_records_calls_request_for_valid_sql( response = MagicMock() response.json.return_value = {"results": [{"id": 1}, {"id": 2}]} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] result = service.query_entity_records("SELECT id FROM Customers WHERE id > 0") assert result == [{"id": 1}, {"id": 2}] - service.request.assert_called_once() + service._data.request.assert_called_once() @pytest.mark.anyio async def test_query_entity_records_async_rejects_invalid_sql_before_network_call( self, service: EntitiesService, ) -> None: - service.request_async = AsyncMock() # type: ignore[method-assign] + service._data.request_async = AsyncMock() # type: ignore[method-assign] with pytest.raises(ValueError, match=re.escape("Subqueries are not allowed.")): await service.query_entity_records_async( "SELECT id FROM Customers WHERE id IN (SELECT id FROM Orders)" ) - service.request_async.assert_not_called() + service._data.request_async.assert_not_called() @pytest.mark.anyio async def test_query_entity_records_async_calls_request_for_valid_sql( @@ -466,14 +467,14 @@ async def test_query_entity_records_async_calls_request_for_valid_sql( response = MagicMock() response.json.return_value = {"results": [{"id": "c1"}]} - service.request_async = AsyncMock(return_value=response) # type: ignore[method-assign] + service._data.request_async = AsyncMock(return_value=response) # type: ignore[method-assign] result = 
await service.query_entity_records_async( "SELECT id FROM Customers WHERE id = 'c1'" ) assert result == [{"id": "c1"}] - service.request_async.assert_called_once() + service._data.request_async.assert_called_once() def test_query_entity_records_builds_routing_context_from_folders_map( self, @@ -487,12 +488,12 @@ def test_query_entity_records_builds_routing_context_from_folders_map( ) response = MagicMock() response.json.return_value = {"results": [{"id": 1}]} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] result = service.query_entity_records("SELECT id FROM Customers LIMIT 10") assert result == [{"id": 1}] - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert body["query"] == "SELECT id FROM Customers LIMIT 10" assert body["routingContext"] == { @@ -515,14 +516,14 @@ async def test_query_entity_records_async_builds_routing_context_from_folders_ma ) response = MagicMock() response.json.return_value = {"results": [{"id": "c1"}]} - service.request_async = AsyncMock(return_value=response) # type: ignore[method-assign] + service._data.request_async = AsyncMock(return_value=response) # type: ignore[method-assign] result = await service.query_entity_records_async( "SELECT id FROM Customers WHERE id = 'c1'" ) assert result == [{"id": "c1"}] - call_kwargs = service.request_async.call_args + call_kwargs = service._data.request_async.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert body["routingContext"] == { "entityRoutings": [ @@ -536,11 +537,11 @@ def test_query_entity_records_without_routing_context_omits_key( ) -> None: response = MagicMock() response.json.return_value = {"results": []} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = 
MagicMock(return_value=response) # type: ignore[method-assign] service.query_entity_records("SELECT id FROM Customers WHERE id > 0") - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert "routingContext" not in body @@ -560,7 +561,7 @@ def test_query_entity_records_picks_up_entity_overwrites_from_context( ) response = MagicMock() response.json.return_value = {"results": [{"id": 1}]} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] overwrite = EntityResourceOverwrite( resource_type="entity", @@ -573,7 +574,7 @@ def test_query_entity_records_picks_up_entity_overwrites_from_context( finally: _resource_overwrites.reset(token) - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert body["routingContext"] == { "entityRoutings": [ @@ -601,11 +602,11 @@ def test_query_entity_records_merges_folders_map_with_entity_name_overrides( ) response = MagicMock() response.json.return_value = {"results": []} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] service.query_entity_records("SELECT id FROM Customers LIMIT 10") - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") routings = body["routingContext"]["entityRoutings"] assert { @@ -740,7 +741,7 @@ def test_query_entity_records_context_overwrite_same_name_no_override_field( ) response = MagicMock() response.json.return_value = {"results": []} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = 
MagicMock(return_value=response) # type: ignore[method-assign] overwrite = EntityResourceOverwrite( resource_type="entity", @@ -753,7 +754,7 @@ def test_query_entity_records_context_overwrite_same_name_no_override_field( finally: _resource_overwrites.reset(token) - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert body["routingContext"] == { "entityRoutings": [ @@ -784,7 +785,7 @@ def test_query_entity_records_resolves_overwrite_folder_path_to_folder_key( ) response = MagicMock() response.json.return_value = {"results": []} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] overwrite = EntityResourceOverwrite( resource_type="entity", @@ -797,7 +798,7 @@ def test_query_entity_records_resolves_overwrite_folder_path_to_folder_key( finally: _resource_overwrites.reset(token) - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert body["routingContext"] == { "entityRoutings": [ @@ -829,7 +830,7 @@ def test_query_entity_records_uses_folder_id_directly_without_resolution( ) response = MagicMock() response.json.return_value = {"results": []} - service.request = MagicMock(return_value=response) # type: ignore[method-assign] + service._data.request = MagicMock(return_value=response) # type: ignore[method-assign] overwrite = EntityResourceOverwrite( resource_type="entity", @@ -845,7 +846,7 @@ def test_query_entity_records_uses_folder_id_directly_without_resolution( # folder_id is a key — should NOT be sent through FolderService folders_service.retrieve_key.assert_not_called() - call_kwargs = service.request.call_args + call_kwargs = service._data.request.call_args body = call_kwargs.kwargs.get("json") or call_kwargs[1].get("json") assert 
body["routingContext"] == { "entityRoutings": [ @@ -1096,3 +1097,1102 @@ def test_get_choiceset_values_empty( values = service.get_choiceset_values(choiceset_id) assert values == [] + + +class TestEntitiesServiceNewMethods: + """Single-record, structured-query, attachment, schema and bulk-import tests.""" + + def test_insert_record_fires_post_with_expansion_level( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityRecord + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert?expansionLevel=2", + status_code=200, + json={"Id": "rec-1", "name": "alice"}, + ) + + record = service.insert_record( + entity_key=str(entity_key), + data={"name": "alice"}, + expansion_level=2, + ) + + assert isinstance(record, EntityRecord) + assert record.id == "rec-1" + + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "POST" + assert json.loads(sent.content) == {"name": "alice"} + + async def test_insert_record_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert", + status_code=200, + json={"Id": "rec-1"}, + ) + + record = await service.insert_record_async( + entity_key=str(entity_key), data={"name": "bob"} + ) + assert record.id == "rec-1" + + def test_get_record( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + record_id = "12345" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}?expansionLevel=1", + status_code=200, + 
json={"Id": record_id, "name": "found"}, + ) + + record = service.get_record( + entity_key=str(entity_key), record_id=record_id, expansion_level=1 + ) + + assert record.id == record_id + + def test_update_record_accepts_dict( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-9" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update/{record_id}", + status_code=200, + json={"Id": record_id, "name": "updated"}, + ) + + record = service.update_record( + entity_key=str(entity_key), + record_id=record_id, + data={"name": "updated"}, + ) + + assert record.id == record_id + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == {"name": "updated"} + + def test_delete_record_uses_http_delete( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-9" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}", + method="DELETE", + status_code=200, + ) + + service.delete_record(entity_key=str(entity_key), record_id=record_id) + + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "DELETE" + + def test_query_v1_with_filter_and_pagination( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import ( + EntityQueryFilter, + EntityQueryFilterGroup, + EntityQuerySortOption, + LogicalOperator, + QueryFilterOperator, + ) + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={ + 
"value": [{"Id": "1", "name": "alice"}, {"Id": "2", "name": "bob"}], + "totalRecordCount": 5, + }, + ) + + result = service.query( + entity_key=str(entity_key), + filter_group=EntityQueryFilterGroup( + logical_operator=LogicalOperator.And, + query_filters=[ + EntityQueryFilter( + field_name="status", + operator=QueryFilterOperator.Equals, + value="active", + ) + ], + ), + sort_options=[EntityQuerySortOption(field_name="name", is_descending=True)], + selected_fields=["Id", "name"], + start=0, + limit=2, + expansion_level=1, + ) + + assert result.total_count == 5 + assert len(result.items) == 2 + assert result.has_next_page is True + # Backend doesn't return next_cursor on this endpoint — caller paginates + # by passing the next ``start`` themselves. + assert result.next_cursor is None + + sent = httpx_mock.get_request() + assert sent is not None + assert "/query" in str(sent.url) and "/v2/" not in str(sent.url) + # expansionLevel is a URL query param, not body + assert sent.url.params.get("expansionLevel") == "1" + body = json.loads(sent.content) + assert body["filterGroup"]["logicalOperator"] == 0 # And + assert body["filterGroup"]["queryFilters"][0]["fieldName"] == "status" + assert body["sortOptions"][0]["fieldName"] == "name" + assert body["selectedFields"] == ["Id", "name"] + # start/limit go in BODY, not as $top/$skip query params + assert body["start"] == 0 + assert body["limit"] == 2 + + def test_query_aggregate_response_handles_id_less_rows( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + """Aggregate / GROUP BY rows lack ``Id`` — must not raise.""" + from uipath.platform.entities import ( + EntityAggregate, + EntityAggregateFunction, + ) + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={ + "value": [ + {"status": "active", 
"total": 12}, + {"status": "inactive", "total": 7}, + ], + "totalRecordCount": 2, + }, + ) + + result = service.query( + entity_key=str(entity_key), + selected_fields=["status"], + group_by=["status"], + aggregates=[ + EntityAggregate( + function=EntityAggregateFunction.Count, + field="Id", + alias="total", + ) + ], + ) + + assert result.total_count == 2 + assert len(result.items) == 2 + # Aggregate rows are exposed as EntityRecord with extra fields, no Id. + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body["aggregates"][0]["function"] == "COUNT" + assert body["aggregates"][0]["alias"] == "total" + assert body["groupBy"] == ["status"] + + def test_query_v2_when_binnings_provided( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityAggregateFunction, EntityBinning + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/v2/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={"value": [], "totalCount": 0}, + ) + + service.query( + entity_key=str(entity_key), + binnings=[ + EntityBinning( + field_name="status", + aggregate_function=EntityAggregateFunction.Count, + alias="total", + ) + ], + ) + + sent = httpx_mock.get_request() + assert sent is not None + assert "/v2/EntityService/" in str(sent.url) + + def test_upload_attachment_sends_multipart( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}?expansionLevel=1", + method="POST", + status_code=200, + json={"Id": record_id, "doc": "uploaded"}, + ) + + result = 
service.upload_attachment( + entity_id=entity_id, + record_id=record_id, + field_name=field_name, + file=b"hello world", + expansion_level=1, + ) + + assert result.get("doc") == "uploaded" + + sent = httpx_mock.get_request() + assert sent is not None + assert b"hello world" in sent.content + + def test_download_attachment_returns_bytes( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}", + method="GET", + status_code=200, + content=b"file-content", + ) + + content = service.download_attachment( + entity_id=entity_id, record_id=record_id, field_name=field_name + ) + assert content == b"file-content" + + def test_delete_attachment( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}", + method="DELETE", + status_code=200, + json={}, + ) + + result = service.delete_attachment( + entity_id=entity_id, record_id=record_id, field_name=field_name + ) + assert result == {} + + def test_create_entity_returns_id( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityCreateOptions, + EntityFieldDataType, + ) + + new_entity_id = str(uuid.uuid4()) + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json=new_entity_id, + ) + + created_id = service.create_entity( + name="productCatalog", + fields=[ + 
EntityCreateFieldOptions( + field_name="productName", + type=EntityFieldDataType.STRING, + is_required=True, + length_limit=200, + ), + ], + options=EntityCreateOptions( + display_name="Product Catalog", + description="Catalog of products", + is_rbac_enabled=True, + ), + ) + + assert created_id == new_entity_id + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body["displayName"] == "Product Catalog" + assert body["entityDefinition"]["name"] == "productCatalog" + assert body["entityDefinition"]["fields"][0]["name"] == "productName" + assert body["entityDefinition"]["isRbacEnabled"] is True + + def test_delete_entity( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-doomed" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/{entity_id}", + method="DELETE", + status_code=200, + ) + + service.delete_entity(entity_id=entity_id) + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "DELETE" + + def test_update_entity_metadata( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityMetadataUpdateOptions + + entity_id = "ent-meta" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/{entity_id}/metadata", + method="PATCH", + status_code=200, + json={}, + ) + + service.update_entity_metadata( + entity_id=entity_id, + metadata=EntityMetadataUpdateOptions( + display_name="New Name", is_rbac_enabled=False + ), + ) + + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body == {"displayName": "New Name", "isRbacEnabled": False} + + def test_import_records( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + 
version: str, + ) -> None: + entity_id = "ent-imp" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_id}/bulk-upload", + method="POST", + status_code=200, + json={ + "totalRecords": 10, + "insertedRecords": 9, + "errorFileLink": "https://example.com/errors.csv", + }, + ) + + result = service.import_records(entity_id=entity_id, file=b"a,b,c\n1,2,3\n") + assert result.total_records == 10 + assert result.inserted_records == 9 + assert result.error_file_link == "https://example.com/errors.csv" + + def test_list_records_returns_paginated_metadata( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read.*" + ), + status_code=200, + json={ + "totalCount": 7, + "value": [{"Id": "1"}, {"Id": "2"}, {"Id": "3"}], + }, + ) + + records = service.list_records( + entity_key=str(entity_key), + start=0, + limit=3, + expansion_level=2, + filter="status eq 'active'", + orderby="name asc", + select=["Id", "name"], + expand=["Company"], + ) + + # New pagination metadata: backend totalCount surfaced verbatim. + assert records.total_count == 7 + assert records.has_next_page is True + # Backend does not currently emit next_cursor; caller paginates with start. + assert records.next_cursor is None + + # Backward-compat: behaves as a list. 
+ assert isinstance(records, list) + assert len(records) == 3 + assert records[0].id == "1" + + sent = httpx_mock.get_request() + assert sent is not None + params = sent.url.params + assert params.get("expansionLevel") == "2" + assert params.get("$filter") == "status eq 'active'" + assert params.get("$orderby") == "name asc" + assert params.get("$select") == "Id,name" + assert params.get("$expand") == "Company" + + def test_insert_records_passes_expansion_level_and_fail_on_first( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert-batch.*" + ), + status_code=200, + json={"successRecords": [{"Id": "1"}], "failureRecords": []}, + ) + + service.insert_records( + entity_key=str(entity_key), + records=[{"name": "alice"}], + expansion_level=1, + fail_on_first=True, + ) + + sent = httpx_mock.get_request() + assert sent is not None + params = sent.url.params + assert params.get("expansionLevel") == "1" + assert params.get("failOnFirst") == "true" + # Records are normalized to dicts before being sent. + assert json.loads(sent.content) == [{"name": "alice"}] + + def test_update_records_recovers_failure_records_from_4xx( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + """A 400 response that lists per-record failures should parse into the response. + + The caller receives an ``EntityRecordsBatchResponse`` with the failed + records populated rather than an exception, so unknown record ids on + update can be handled the same way as any other batch failure. 
+ """ + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=400, + json={ + "successRecords": [], + "failureRecords": [ + {"error": "Record not found", "record": {"Id": "missing"}} + ], + }, + ) + + result = service.update_records( + entity_key=str(entity_key), + records=[{"Id": "missing", "name": "x"}], + ) + + assert len(result.failure_records) == 1 + assert result.failure_records[0].error == "Record not found" + + def test_delete_records_recovers_failure_records_from_4xx( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete-batch", + method="POST", + status_code=400, + json={ + "successRecords": [], + "failureRecords": [{"error": "not found"}], + }, + ) + + result = service.delete_records( + entity_key=str(entity_key), record_ids=["missing"] + ) + + assert result.failure_records[0].error == "not found" + + def test_record_to_dict_accepts_dict_pydantic_and_object(self) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + # dict + assert EntityDataService._record_to_dict({"a": 1}) == {"a": 1} + # Pydantic model — uses model_dump + result = EntityDataService._record_to_dict( + EntityCreateFieldOptions(field_name="x") + ) + assert result["fieldName"] == "x" + # Object with __dict__ + from dataclasses import dataclass + + @dataclass + class Rec: + name: str + + assert EntityDataService._record_to_dict(Rec(name="bob")) == {"name": "bob"} + + +class TestEntitiesServiceCreateEntitySqlTypeMapping: + """Verify ``create_entity`` produces the SQL types and constraint defaults the backend expects.""" + + def _captured_field( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + 
org: str, + tenant: str, + field_options, + ): + from uipath.platform.entities import EntityCreateOptions + + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json="00000000-0000-0000-0000-000000000001", + ) + service.create_entity( + name="myEntity", + fields=[field_options], + options=EntityCreateOptions(display_name="My Entity"), + ) + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + return body["entityDefinition"]["fields"][0] + + def test_string_field_maps_to_nvarchar_with_default_length( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="productName", type=EntityFieldDataType.STRING + ), + ) + assert f["sqlType"]["name"] == "NVARCHAR" + assert f["sqlType"]["lengthLimit"] == 200 # default + assert f["fieldDisplayType"] == "Basic" + + def test_decimal_field_includes_precision_and_value_bounds( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="price", + type=EntityFieldDataType.DECIMAL, + decimal_precision=4, + ), + ) + assert f["sqlType"]["name"] == "DECIMAL" + assert f["sqlType"]["decimalPrecision"] == 4 + assert f["sqlType"]["lengthLimit"] == 1000 + assert f["sqlType"]["maxValue"] == 1_000_000_000_000 + assert f["sqlType"]["minValue"] == -1_000_000_000_000 + + def test_boolean_field_maps_to_bit( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + 
f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="isActive", type=EntityFieldDataType.BOOLEAN + ), + ) + assert f["sqlType"]["name"] == "BIT" + assert f["sqlType"]["lengthLimit"] == 100 + + def test_file_field_maps_to_uniqueidentifier_with_file_display_type( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="document", type=EntityFieldDataType.FILE + ), + ) + assert f["sqlType"]["name"] == "UNIQUEIDENTIFIER" + assert f["fieldDisplayType"] == "File" + assert f["sqlType"]["lengthLimit"] == 300 + + +class TestEntitiesServiceValidation: + """Client-side validation rejects bad entity / field definitions before any HTTP call.""" + + def test_create_entity_rejects_invalid_entity_name(self, service) -> None: + + with pytest.raises(ValueError, match="Invalid entity name"): + service.create_entity(name="1bad", fields=[]) + + def test_create_entity_rejects_invalid_field_name(self, service) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + with pytest.raises(ValueError, match="Invalid field name"): + service.create_entity( + name="goodEntity", + fields=[EntityCreateFieldOptions(field_name="9bad")], + ) + + def test_create_entity_rejects_reserved_field_name(self, service) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + with pytest.raises(ValueError, match="reserved"): + service.create_entity( + name="goodEntity", + fields=[EntityCreateFieldOptions(field_name="Id")], + ) + + def test_create_entity_rejects_unsupported_constraint_for_type( + self, service + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="does not 
accept"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.STRING, + decimal_precision=2, # not allowed on STRING + ) + ], + ) + + def test_create_entity_rejects_out_of_range_constraint(self, service) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="out of range"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.STRING, + length_limit=99999, # > 4000 + ) + ], + ) + + def test_create_entity_rejects_min_ge_max(self, service) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="strictly less than"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.INTEGER, + min_value=100, + max_value=10, + ) + ], + ) + + +class TestEntitiesServiceAsyncAndEdgeCases: + async def test_get_record_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-1" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}", + status_code=200, + json={"Id": record_id, "name": "found"}, + ) + record = await service.get_record_async( + entity_key=str(entity_key), record_id=record_id + ) + assert record.id == record_id + + async def test_query_async_v1( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query" + ), + status_code=200, + json={"value": [{"Id": "1"}], "totalRecordCount": 1}, + ) + result = await 
service.query_async(entity_key=str(entity_key)) + assert result.total_count == 1 + + async def test_delete_record_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-1" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}", + method="DELETE", + status_code=200, + ) + await service.delete_record_async( + entity_key=str(entity_key), record_id=record_id + ) + + async def test_create_entity_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + new_id = "00000000-0000-0000-0000-000000000123" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json=new_id, + ) + result = await service.create_entity_async( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", type=EntityFieldDataType.STRING + ) + ], + ) + assert result == new_id + + async def test_delete_entity_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1", + method="DELETE", + status_code=200, + ) + await service.delete_entity_async(entity_id="ent-1") + + async def test_update_entity_metadata_async_with_dict( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1/metadata", + method="PATCH", + status_code=200, + json={}, + ) + # Accepts a plain dict too + await service.update_entity_metadata_async( + entity_id="ent-1", metadata={"displayName": "X", "description": "Y"} + ) + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == {"displayName": "X", "description": "Y"} + + def 
test_update_entity_metadata_normalizes_snake_case_dict_keys( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """Snake_case dict keys must be sent to the backend as camelCase.""" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1/metadata", + method="PATCH", + status_code=200, + json={}, + ) + service.update_entity_metadata( + entity_id="ent-1", + metadata={ + "display_name": "New Name", + "description": "Updated", + "is_rbac_enabled": True, + }, + ) + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == { + "displayName": "New Name", + "description": "Updated", + "isRbacEnabled": True, + } + + async def test_upload_attachment_async_via_file_path( + self, httpx_mock, service, base_url, org, tenant, version, tmp_path + ) -> None: + path = tmp_path / "data.bin" + path.write_bytes(b"file-on-disk") + + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/ent/rec/doc", + method="POST", + status_code=200, + json={"Id": "rec", "doc": "ok"}, + ) + result = await service.upload_attachment_async( + entity_id="ent", + record_id="rec", + field_name="doc", + file_path=str(path), + ) + assert result["doc"] == "ok" + + sent = httpx_mock.get_request() + assert sent is not None + assert b"file-on-disk" in sent.content + + async def test_download_and_delete_attachment_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + url = f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/e/r/f" + httpx_mock.add_response( + url=url, method="GET", status_code=200, content=b"bytes" + ) + httpx_mock.add_response(url=url, method="DELETE", status_code=200, json={}) + + content = await service.download_attachment_async( + entity_id="e", record_id="r", field_name="f" + ) + assert content == b"bytes" + assert ( + await service.delete_attachment_async( + entity_id="e", record_id="r", field_name="f" + ) + == {} + ) + + def 
test_open_file_rejects_both_file_and_path(self) -> None: + with pytest.raises(ValueError, match="exactly one of"): + EntityDataService._open_file(file=b"x", file_path="some/path") + + def test_open_file_rejects_neither_file_nor_path(self) -> None: + with pytest.raises(ValueError, match="exactly one of"): + EntityDataService._open_file(file=None, file_path=None) + + def test_4xx_recovery_only_400_with_strict_shape( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """5xx and 4xx other than 400 must propagate; 400 with valid shape recovers.""" + entity_key = uuid.uuid4() + # 500 with the shape — must propagate, not be silently treated as success. + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=500, + json={"successRecords": [], "failureRecords": []}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) + + def test_4xx_recovery_404_propagates( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + # 404 with valid shape — still propagates because not a 400. 
+ httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=404, + json={"successRecords": [], "failureRecords": []}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) + + def test_4xx_recovery_400_unrelated_body_propagates( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """A 400 with an error body that lacks ``successRecords``/``failureRecords`` + must surface as an exception (so generic validation errors aren't masked).""" + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=400, + json={"error": "Validation failed", "code": "InvalidArg"}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) diff --git a/packages/uipath-platform/uv.lock b/packages/uipath-platform/uv.lock index 1e7878b10..3320bcd9d 100644 --- a/packages/uipath-platform/uv.lock +++ b/packages/uipath-platform/uv.lock @@ -1088,7 +1088,7 @@ dev = [ [[package]] name = "uipath-platform" -version = "0.1.48" +version = "0.1.49" source = { editable = "." 
} dependencies = [ { name = "httpx" }, diff --git a/packages/uipath/uv.lock b/packages/uipath/uv.lock index 3d4c916d3..4513b1c81 100644 --- a/packages/uipath/uv.lock +++ b/packages/uipath/uv.lock @@ -2682,7 +2682,7 @@ dev = [ [[package]] name = "uipath-platform" -version = "0.1.48" +version = "0.1.49" source = { editable = "../uipath-platform" } dependencies = [ { name = "httpx" }, From 6cf05ba629fff4f93f1dbdaad382903ad61da0fe Mon Sep 17 00:00:00 2001 From: avichalsri24 Date: Thu, 14 May 2026 15:02:29 +0530 Subject: [PATCH 2/2] test(platform): raise entities-service new-code coverage above 90% - Add 13 async-variant tests covering retrieve_async, retrieve_by_name (sync + async), list_entities_async, list_records_async, update_record_async, batch async (insert / update / delete), import_records_async, plus validate_entity_batch and 5xx-shape edge cases. - Convert remaining ``Union[A, B]`` annotations to ``A | B`` (PEP 604) across entities source files; drop now-unused ``Union`` imports. Coverage on new code: 87.6% -> ~95% across the four new files, clearing the SonarCloud 90% quality gate. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- .../platform/entities/_entities_service.py | 10 +- .../platform/entities/_entity_data_service.py | 8 +- .../entities/_entity_schema_service.py | 8 +- .../src/uipath/platform/entities/entities.py | 4 +- .../tests/services/test_entities_service.py | 331 ++++++++++++++++++ 5 files changed, 345 insertions(+), 16 deletions(-) diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py index f8eb08c6a..a86aa5de3 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py +++ b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py @@ -15,7 +15,7 @@ """ import logging -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Dict, List, Optional, Type from httpx import Response from uipath.core.tracing import traced @@ -451,13 +451,13 @@ async def delete_entity_async(self, entity_id: str) -> None: def update_entity_metadata( self, entity_id: str, - metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + metadata: EntityMetadataUpdateOptions | Dict[str, Any], ) -> None: """Update an entity's display name, description, and/or RBAC flag. Args: entity_id (str): The unique identifier of the entity. - metadata (Union[EntityMetadataUpdateOptions, Dict[str, Any]]): + metadata (EntityMetadataUpdateOptions | Dict[str, Any]): An :class:`EntityMetadataUpdateOptions` instance or a dict with any of ``display_name``, ``description``, ``is_rbac_enabled``. Dict keys may be snake_case @@ -490,13 +490,13 @@ def update_entity_metadata( async def update_entity_metadata_async( self, entity_id: str, - metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + metadata: EntityMetadataUpdateOptions | Dict[str, Any], ) -> None: """Asynchronously update an entity's display name, description, and/or RBAC flag. 
Args: entity_id (str): The unique identifier of the entity. - metadata (Union[EntityMetadataUpdateOptions, Dict[str, Any]]): + metadata (EntityMetadataUpdateOptions | Dict[str, Any]): An :class:`EntityMetadataUpdateOptions` instance or a dict with any of ``display_name``, ``description``, ``is_rbac_enabled``. diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py index 69fbe38fa..284c6d2b6 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py +++ b/packages/uipath-platform/src/uipath/platform/entities/_entity_data_service.py @@ -10,7 +10,7 @@ import logging from contextlib import nullcontext from pathlib import Path -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Dict, List, Optional, Type import sqlparse from httpx import HTTPStatusError, Response @@ -42,7 +42,7 @@ logger = logging.getLogger(__name__) -FileContent = Union[bytes, bytearray, memoryview] +FileContent = bytes | bytearray | memoryview """Acceptable raw bytes types for attachment and CSV uploads.""" _FORBIDDEN_DML = {"INSERT", "UPDATE", "DELETE", "MERGE", "REPLACE"} @@ -1147,7 +1147,7 @@ def _parse_choiceset_values(response: Response) -> List[ChoiceSetValue]: def _request_or_extract_batch( self, sync_call: Any, - ) -> Union[Response, EntityRecordsBatchResponse]: + ) -> Response | EntityRecordsBatchResponse: """Run a batch request and recover per-record failures from a 400 body. 
On HTTP 400 with a body that lists both ``successRecords`` and @@ -1165,7 +1165,7 @@ def _request_or_extract_batch( async def _request_or_extract_batch_async( self, async_call: Any, - ) -> Union[Response, EntityRecordsBatchResponse]: + ) -> Response | EntityRecordsBatchResponse: """Async variant of :meth:`_request_or_extract_batch`.""" try: return await async_call() diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py index 8b79b8e4b..c66b44a34 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py +++ b/packages/uipath-platform/src/uipath/platform/entities/_entity_schema_service.py @@ -7,7 +7,7 @@ """ import re -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional from httpx import Response @@ -159,7 +159,7 @@ async def delete_entity_async(self, entity_id: str) -> None: def update_entity_metadata( self, entity_id: str, - metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + metadata: EntityMetadataUpdateOptions | Dict[str, Any], ) -> None: """Internal implementation; see :meth:`EntitiesService.update_entity_metadata`.""" spec = self._update_entity_metadata_spec(entity_id, metadata) @@ -168,7 +168,7 @@ def update_entity_metadata( async def update_entity_metadata_async( self, entity_id: str, - metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + metadata: EntityMetadataUpdateOptions | Dict[str, Any], ) -> None: """Async variant of :meth:`update_entity_metadata`.""" spec = self._update_entity_metadata_spec(entity_id, metadata) @@ -262,7 +262,7 @@ def _delete_entity_spec(entity_id: str) -> RequestSpec: @staticmethod def _update_entity_metadata_spec( entity_id: str, - metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + metadata: EntityMetadataUpdateOptions | Dict[str, Any], ) -> RequestSpec: """Build the PATCH spec for updating entity 
metadata. diff --git a/packages/uipath-platform/src/uipath/platform/entities/entities.py b/packages/uipath-platform/src/uipath/platform/entities/entities.py index c20d95b74..4e0b65d1a 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/entities.py +++ b/packages/uipath-platform/src/uipath/platform/entities/entities.py @@ -550,9 +550,7 @@ def __getitem__(self, index: int) -> EntityRecord: ... @overload def __getitem__(self, index: slice) -> List[EntityRecord]: ... - def __getitem__( - self, index: Union[int, slice] - ) -> Union[EntityRecord, List[EntityRecord]]: + def __getitem__(self, index: int | slice) -> EntityRecord | List[EntityRecord]: """Index or slice records (delegates to ``self.items``).""" return self.items[index] diff --git a/packages/uipath-platform/tests/services/test_entities_service.py b/packages/uipath-platform/tests/services/test_entities_service.py index 2938ae01c..48e3ae232 100644 --- a/packages/uipath-platform/tests/services/test_entities_service.py +++ b/packages/uipath-platform/tests/services/test_entities_service.py @@ -2196,3 +2196,334 @@ def test_4xx_recovery_400_unrelated_body_propagates( service.update_records( entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] ) + + +class TestEntitiesServiceAsyncCoverage: + """Async-variant tests for previously uncovered paths on schema / data services.""" + + async def test_retrieve_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/{entity_key}", + status_code=200, + json={ + "name": "Customers", + "displayName": "Customers", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": str(entity_key), + }, + ) + entity = await service.retrieve_async(entity_key=str(entity_key)) + assert entity.id == str(entity_key) + + def test_retrieve_by_name( + self, + 
httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/Customers/metadata", + status_code=200, + json={ + "name": "Customers", + "displayName": "Customers", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": "ent-1", + }, + ) + entity = service.retrieve_by_name("Customers", folder_key="folder-1") + assert entity.name == "Customers" + sent = httpx_mock.get_request() + assert sent is not None + assert sent.headers.get("X-UIPATH-FolderKey") == "folder-1" + + async def test_retrieve_by_name_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/Orders/metadata", + status_code=200, + json={ + "name": "Orders", + "displayName": "Orders", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": "ent-2", + }, + ) + entity = await service.retrieve_by_name_async("Orders") + assert entity.name == "Orders" + + def test_list_entities_basic( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + status_code=200, + json=[ + { + "name": "Customers", + "displayName": "Customers", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": "ent-1", + }, + { + "name": "Orders", + "displayName": "Orders", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": "ent-2", + }, + ], + ) + entities = service.list_entities() + assert len(entities) == 2 + + async def test_list_entities_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + 
httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + status_code=200, + json=[ + { + "name": "Customers", + "displayName": "Customers", + "entityType": "Entity", + "fields": [], + "isRbacEnabled": False, + "id": "ent-1", + } + ], + ) + entities = await service.list_entities_async() + assert len(entities) == 1 + + async def test_list_records_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read.*" + ), + status_code=200, + json={ + "totalRecordCount": 2, + "value": [{"Id": "1"}, {"Id": "2"}], + }, + ) + records = await service.list_records_async( + entity_key=str(entity_key), start=0, limit=10 + ) + assert records.total_count == 2 + + async def test_update_record_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update/rec-1", + method="POST", + status_code=200, + json={"Id": "rec-1", "name": "renamed"}, + ) + rec = await service.update_record_async( + entity_key=str(entity_key), + record_id="rec-1", + data={"name": "renamed"}, + ) + assert rec.id == "rec-1" + + async def test_insert_records_async_batch( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert-batch.*" + ), + status_code=200, + json={ + "successRecords": [{"Id": "1", "name": "a"}], + "failureRecords": [], + }, + ) + result = await service.insert_records_async( + 
entity_key=str(entity_key), + records=[{"name": "a"}], + expansion_level=1, + fail_on_first=True, + ) + assert len(result.success_records) == 1 + + async def test_update_records_async_recovers_400_failures( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=400, + json={ + "successRecords": [], + "failureRecords": [{"error": "not found"}], + }, + ) + result = await service.update_records_async( + entity_key=str(entity_key), + records=[{"Id": "missing", "name": "x"}], + ) + assert result.failure_records[0].error == "not found" + + async def test_delete_records_async_batch( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete-batch.*" + ), + status_code=200, + json={ + "successRecords": [{"Id": "rec-1"}], + "failureRecords": [], + }, + ) + result = await service.delete_records_async( + entity_key=str(entity_key), + record_ids=["rec-1"], + fail_on_first=False, + ) + assert len(result.success_records) == 1 + + async def test_import_records_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/ent-1/bulk-upload", + method="POST", + status_code=200, + json={ + "totalRecords": 3, + "insertedRecords": 3, + "errorFileLink": None, + }, + ) + result = await service.import_records_async( + entity_id="ent-1", file=b"a,b\n1,2\n" + ) + assert result.inserted_records == 3 + + def 
test_validate_entity_batch_handles_success_and_failure_records( + self, + service: EntitiesService, + ) -> None: + response = MagicMock() + response.json.return_value = { + "successRecords": [{"Id": "ok-1", "name": "first"}], + "failureRecords": [{"error": "duplicate", "record": {"name": "dup"}}], + } + result = service.validate_entity_batch(response) + assert len(result.success_records) == 1 + assert result.success_records[0].id == "ok-1" + assert result.failure_records[0].error == "duplicate" + + def test_5xx_with_batch_shape_still_propagates( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + """500 with successRecords/failureRecords shape must NOT be recovered.""" + from uipath.platform.errors._enriched_exception import EnrichedException + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert-batch", + method="POST", + status_code=500, + json={"successRecords": [], "failureRecords": []}, + ) + with pytest.raises(EnrichedException): + service.insert_records( + entity_key=str(entity_key), + records=[{"name": "x"}], + )