From 4e6231bb148ce22bc0554fb5683dd6cfc6aa224c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 13:04:36 +0000 Subject: [PATCH 01/25] Highlighting boolean values --- arangoasync/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arangoasync/database.py b/arangoasync/database.py index 60f6ee9..058daf0 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -679,7 +679,7 @@ async def has_graph(self, name: str) -> Result[bool]: name (str): Graph name. Returns: - bool: True if the graph exists, False otherwise. + bool: `True` if the graph exists, `False` otherwise. Raises: GraphListError: If the operation fails. From ba450287b701970a1df2fdcb7501bc466a0b5eca Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 13:49:16 +0000 Subject: [PATCH 02/25] Adding vertex and edge collection skeleton --- arangoasync/collection.py | 77 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 76 insertions(+), 1 deletion(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 3b4e5a9..adbef25 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -1,4 +1,9 @@ -__all__ = ["Collection", "StandardCollection"] +__all__ = [ + "Collection", + "EdgeCollection", + "StandardCollection", + "VertexCollection", +] from typing import Any, Generic, List, Optional, Sequence, Tuple, TypeVar, cast @@ -1711,3 +1716,73 @@ def response_handler( return self.deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) + + +class VertexCollection(Collection[T, U, V]): + """Vertex collection API wrapper. + + Args: + executor (ApiExecutor): API executor. + name (str): Collection name + graph (str): Graph name. + doc_serializer (Serializer): Document serializer. + doc_deserializer (Deserializer): Document deserializer. 
+ """ + + def __init__( + self, + executor: ApiExecutor, + graph: str, + name: str, + doc_serializer: Serializer[T], + doc_deserializer: Deserializer[U, V], + ) -> None: + super().__init__(executor, name, doc_serializer, doc_deserializer) + self._graph = graph + + def __repr__(self) -> str: + return f"" + + @property + def graph(self) -> str: + """Return the graph name. + + Returns: + str: Graph name. + """ + return self._graph + + +class EdgeCollection(Collection[T, U, V]): + """Edge collection API wrapper. + + Args: + executor (ApiExecutor): API executor. + name (str): Collection name + graph (str): Graph name. + doc_serializer (Serializer): Document serializer. + doc_deserializer (Deserializer): Document deserializer. + """ + + def __init__( + self, + executor: ApiExecutor, + graph: str, + name: str, + doc_serializer: Serializer[T], + doc_deserializer: Deserializer[U, V], + ) -> None: + super().__init__(executor, name, doc_serializer, doc_deserializer) + self._graph = graph + + def __repr__(self) -> str: + return f"" + + @property + def graph(self) -> str: + """Return the graph name. + + Returns: + str: Graph name. + """ + return self._graph From 9dc1353c4a8a9ba9d8cb1f26171de909112d7dfa Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 14:34:07 +0000 Subject: [PATCH 03/25] Refactoring serializers --- arangoasync/database.py | 100 +++++++++++++++++++++++++++++----------- arangoasync/graph.py | 99 +++++++++++++++++++++++++++++++++++++-- 2 files changed, 170 insertions(+), 29 deletions(-) diff --git a/arangoasync/database.py b/arangoasync/database.py index 058daf0..3cac02d 100644 --- a/arangoasync/database.py +++ b/arangoasync/database.py @@ -88,6 +88,40 @@ class Database: def __init__(self, executor: ApiExecutor) -> None: self._executor = executor + def _get_doc_serializer( + self, + doc_serializer: Optional[Serializer[T]] = None, + ) -> Serializer[T]: + """Figure out the document serializer, defaulting to `Json`. 
+ + Args: + doc_serializer (Serializer | None): Optional serializer. + + Returns: + Serializer: Either the passed serializer or the default one. + """ + if doc_serializer is None: + return cast(Serializer[T], self.serializer) + else: + return doc_serializer + + def _get_doc_deserializer( + self, + doc_deserializer: Optional[Deserializer[U, V]] = None, + ) -> Deserializer[U, V]: + """Figure out the document deserializer, defaulting to `Json`. + + Args: + doc_deserializer (Deserializer | None): Optional deserializer. + + Returns: + Deserializer: Either the passed deserializer or the default one. + """ + if doc_deserializer is None: + return cast(Deserializer[U, V], self.deserializer) + else: + return doc_deserializer + @property def connection(self) -> Connection: """Return the HTTP connection.""" @@ -390,17 +424,11 @@ def collection( Returns: StandardCollection: Collection API wrapper. """ - if doc_serializer is None: - serializer = cast(Serializer[T], self.serializer) - else: - serializer = doc_serializer - if doc_deserializer is None: - deserializer = cast(Deserializer[U, V], self.deserializer) - else: - deserializer = doc_deserializer - return StandardCollection[T, U, V]( - self._executor, name, serializer, deserializer + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), ) async def collections( @@ -604,16 +632,11 @@ async def create_collection( def response_handler(resp: Response) -> StandardCollection[T, U, V]: if not resp.is_success: raise CollectionCreateError(resp, request) - if doc_serializer is None: - serializer = cast(Serializer[T], self.serializer) - else: - serializer = doc_serializer - if doc_deserializer is None: - deserializer = cast(Deserializer[U, V], self.deserializer) - else: - deserializer = doc_deserializer return StandardCollection[T, U, V]( - self._executor, name, serializer, deserializer + self._executor, + name, + self._get_doc_serializer(doc_serializer), + 
self._get_doc_deserializer(doc_deserializer), ) return await self._executor.execute(request, response_handler) @@ -661,16 +684,30 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) - def graph(self, name: str) -> Graph: + def graph( + self, + name: str, + doc_serializer: Optional[Serializer[T]] = None, + doc_deserializer: Optional[Deserializer[U, V]] = None, + ) -> Graph[T, U, V]: """Return the graph API wrapper. Args: name (str): Graph name. + doc_serializer (Serializer): Custom document serializer. + This will be used only for document operations. + doc_deserializer (Deserializer): Custom document deserializer. + This will be used only for document operations. Returns: Graph: Graph API wrapper. """ - return Graph(self._executor, name) + return Graph[T, U, V]( + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), + ) async def has_graph(self, name: str) -> Result[bool]: """Check if a graph exists in the database. @@ -720,17 +757,23 @@ def response_handler(resp: Response) -> List[GraphProperties]: async def create_graph( self, name: str, + doc_serializer: Optional[Serializer[T]] = None, + doc_deserializer: Optional[Deserializer[U, V]] = None, edge_definitions: Optional[Sequence[Json]] = None, is_disjoint: Optional[bool] = None, is_smart: Optional[bool] = None, options: Optional[GraphOptions | Json] = None, orphan_collections: Optional[Sequence[str]] = None, wait_for_sync: Optional[bool] = None, - ) -> Result[Graph]: + ) -> Result[Graph[T, U, V]]: """Create a new graph. Args: name (str): Graph name. + doc_serializer (Serializer): Custom document serializer. + This will be used only for document operations. + doc_deserializer (Deserializer): Custom document deserializer. + This will be used only for document operations. 
edge_definitions (list | None): List of edge definitions, where each edge definition entry is a dictionary with fields "collection" (name of the edge collection), "from" (list of vertex collection names) and "to" @@ -782,10 +825,15 @@ async def create_graph( params=params, ) - def response_handler(resp: Response) -> Graph: - if resp.is_success: - return Graph(self._executor, name) - raise GraphCreateError(resp, request) + def response_handler(resp: Response) -> Graph[T, U, V]: + if not resp.is_success: + raise GraphCreateError(resp, request) + return Graph[T, U, V]( + self._executor, + name, + self._get_doc_serializer(doc_serializer), + self._get_doc_deserializer(doc_deserializer), + ) return await self._executor.execute(request, response_handler) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 2047d96..6caea22 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,16 +1,43 @@ +__all__ = ["Graph"] + + +from typing import Generic, TypeVar + +from arangoasync.collection import EdgeCollection, VertexCollection +from arangoasync.exceptions import GraphListError from arangoasync.executor import ApiExecutor +from arangoasync.request import Method, Request +from arangoasync.response import Response +from arangoasync.result import Result +from arangoasync.serialization import Deserializer, Serializer +from arangoasync.typings import GraphProperties, Json, Jsons +T = TypeVar("T") # Serializer type +U = TypeVar("U") # Deserializer loads +V = TypeVar("V") # Deserializer loads_many -class Graph: + +class Graph(Generic[T, U, V]): """Graph API wrapper, representing a graph in ArangoDB. Args: - executor: API executor. Required to execute the API requests. + executor (APIExecutor): Required to execute the API requests. + name (str): Graph name. + doc_serializer (Serializer): Document serializer. + doc_deserializer (Deserializer): Document deserializer. 
""" - def __init__(self, executor: ApiExecutor, name: str) -> None: + def __init__( + self, + executor: ApiExecutor, + name: str, + doc_serializer: Serializer[T], + doc_deserializer: Deserializer[U, V], + ) -> None: self._executor = executor self._name = name + self._doc_serializer = doc_serializer + self._doc_deserializer = doc_deserializer def __repr__(self) -> str: return f"" @@ -19,3 +46,69 @@ def __repr__(self) -> str: def name(self) -> str: """Name of the graph.""" return self._name + + @property + def serializer(self) -> Serializer[Json]: + """Return the serializer.""" + return self._executor.serializer + + @property + def deserializer(self) -> Deserializer[Json, Jsons]: + """Return the deserializer.""" + return self._executor.deserializer + + async def properties(self) -> Result[GraphProperties]: + """Get the properties of the graph. + + Returns: + GraphProperties: Properties of the graph. + + Raises: + GraphListError: If the operation fails. + + References: + - `get-a-graph `__ + """ # noqa: E501 + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") + + def response_handler(resp: Response) -> GraphProperties: + if not resp.is_success: + raise GraphListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return GraphProperties(body["graph"]) + + return await self._executor.execute(request, response_handler) + + def vertex_collection(self, name: str) -> VertexCollection[T, U, V]: + """Returns the vertex collection API wrapper. + + Args: + name (str): Vertex collection name. + + Returns: + VertexCollection: Vertex collection API wrapper. + """ + return VertexCollection[T, U, V]( + executor=self._executor, + graph=self._name, + name=name, + doc_serializer=self._doc_serializer, + doc_deserializer=self._doc_deserializer, + ) + + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: + """Returns the edge collection API wrapper. + + Args: + name (str): Edge collection name. 
+ + Returns: + EdgeCollection: Edge collection API wrapper. + """ + return EdgeCollection[T, U, V]( + executor=self._executor, + graph=self._name, + name=name, + doc_serializer=self._doc_serializer, + doc_deserializer=self._doc_deserializer, + ) From 128d328c9468f71713273e1258da8b2a718cb65b Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 14:38:09 +0000 Subject: [PATCH 04/25] Using randomized graph name --- tests/helpers.py | 9 +++++++++ tests/test_graph.py | 39 ++++++++++++++++++++++++++------------- 2 files changed, 35 insertions(+), 13 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index cf8b3cb..8e91c26 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -19,6 +19,15 @@ def generate_col_name(): return f"test_collection_{uuid4().hex}" +def generate_graph_name(): + """Generate and return a random graph name. + + Returns: + str: Random graph name. + """ + return f"test_graph_{uuid4().hex}" + + def generate_username(): """Generate and return a random username. 
diff --git a/tests/test_graph.py b/tests/test_graph.py index 0967ff9..6eb01f0 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,37 +1,50 @@ import pytest from arangoasync.exceptions import GraphCreateError, GraphDeleteError, GraphListError +from tests.helpers import generate_graph_name @pytest.mark.asyncio async def test_graph_basic(db, bad_db): + graph1_name = generate_graph_name() # Test the graph representation - graph = db.graph("test_graph") - assert graph.name == "test_graph" - assert "test_graph" in repr(graph) + graph = db.graph(graph1_name) + assert graph.name == graph1_name + assert graph1_name in repr(graph) # Cannot find any graph + graph2_name = generate_graph_name() assert await db.graphs() == [] - assert await db.has_graph("fake_graph") is False + assert await db.has_graph(graph2_name) is False with pytest.raises(GraphListError): - await bad_db.has_graph("fake_graph") + await bad_db.has_graph(graph2_name) with pytest.raises(GraphListError): await bad_db.graphs() # Create a graph - graph = await db.create_graph("test_graph", wait_for_sync=True) - assert graph.name == "test_graph" + graph = await db.create_graph(graph1_name, wait_for_sync=True) + assert graph.name == graph1_name with pytest.raises(GraphCreateError): - await bad_db.create_graph("test_graph") + await bad_db.create_graph(graph1_name) # Check if the graph exists - assert await db.has_graph("test_graph") is True + assert await db.has_graph(graph1_name) is True graphs = await db.graphs() assert len(graphs) == 1 - assert graphs[0].name == "test_graph" + assert graphs[0].name == graph1_name # Delete the graph - await db.delete_graph("test_graph") - assert await db.has_graph("test_graph") is False + await db.delete_graph(graph1_name) + assert await db.has_graph(graph1_name) is False with pytest.raises(GraphDeleteError): - await bad_db.delete_graph("test_graph") + await bad_db.delete_graph(graph1_name) + + +async def test_graph_properties(db): + # Create a graph + name = 
generate_graph_name() + graph = await db.create_graph(name) + + # Get the properties of the graph + properties = await graph.properties() + assert properties.name == name From 7a09541f75e05d8eace0deeb20471546d6544ace Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 17:31:03 +0000 Subject: [PATCH 05/25] Improving helper types --- arangoasync/typings.py | 93 +++++++++++++++++++++++++++++++++++++++--- tests/test_typings.py | 18 ++++++++ 2 files changed, 106 insertions(+), 5 deletions(-) diff --git a/arangoasync/typings.py b/arangoasync/typings.py index 86c32fd..4c0af22 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -1692,6 +1692,32 @@ def __init__(self, data: Json) -> None: def name(self) -> str: return cast(str, self._data["name"]) + @property + def is_smart(self) -> bool: + """Check if the graph is a smart graph.""" + return cast(bool, self._data.get("isSmart", False)) + + @property + def is_satellite(self) -> bool: + """Check if the graph is a satellite graph.""" + return cast(bool, self._data.get("isSatellite", False)) + + @property + def number_of_shards(self) -> Optional[int]: + return cast(Optional[int], self._data.get("numberOfShards")) + + @property + def replication_factor(self) -> Optional[int | str]: + return cast(Optional[int | str], self._data.get("replicationFactor")) + + @property + def min_replication_factor(self) -> Optional[int]: + return cast(Optional[int], self._data.get("minReplicationFactor")) + + @property + def write_concern(self) -> Optional[int]: + return cast(Optional[int], self._data.get("writeConcern")) + @property def edge_definitions(self) -> Jsons: return cast(Jsons, self._data.get("edgeDefinitions", list())) @@ -1720,15 +1746,18 @@ class GraphOptions(JsonWrapper): Enterprise Edition. write_concern (int | None): The write concern for new collections in the graph. 
+ + References: + - `create-a-graph `__ """ # noqa: E501 def __init__( self, - number_of_shards: Optional[int], - replication_factor: Optional[int | str], - satellites: Optional[List[str]], - smart_graph_attribute: Optional[str], - write_concern: Optional[int], + number_of_shards: Optional[int] = None, + replication_factor: Optional[int | str] = None, + satellites: Optional[List[str]] = None, + smart_graph_attribute: Optional[str] = None, + write_concern: Optional[int] = None, ) -> None: data: Json = dict() if number_of_shards is not None: @@ -1762,3 +1791,57 @@ def smart_graph_attribute(self) -> Optional[str]: @property def write_concern(self) -> Optional[int]: return cast(Optional[int], self._data.get("writeConcern")) + + +class VertexCollectionOptions(JsonWrapper): + """Special options for vertex collection creation. + + Args: + satellites (list): An array of collection names that is used to create + SatelliteCollections for a (Disjoint) SmartGraph using + SatelliteCollections (Enterprise Edition only). Each array element must + be a string and a valid collection name. + + References: + - `add-a-vertex-collection `__ + """ # noqa: E501 + + def __init__( + self, + satellites: Optional[List[str]] = None, + ) -> None: + data: Json = dict() + if satellites is not None: + data["satellites"] = satellites + super().__init__(data) + + @property + def satellites(self) -> Optional[List[str]]: + return cast(Optional[List[str]], self._data.get("satellites")) + + +class EdgeDefinitionOptions(JsonWrapper): + """Special options for edge definition creation. + + Args: + satellites (list): An array of collection names that is used to create + SatelliteCollections for a (Disjoint) SmartGraph using + SatelliteCollections (Enterprise Edition only). Each array element must + be a string and a valid collection name. 
+ + References: + - `add-an-edge-definition `__ + """ # noqa: E501 + + def __init__( + self, + satellites: Optional[List[str]] = None, + ) -> None: + data: Json = dict() + if satellites is not None: + data["satellites"] = satellites + super().__init__(data) + + @property + def satellites(self) -> Optional[List[str]]: + return cast(Optional[List[str]], self._data.get("satellites")) diff --git a/tests/test_typings.py b/tests/test_typings.py index 7a40c33..fd04fa1 100644 --- a/tests/test_typings.py +++ b/tests/test_typings.py @@ -4,6 +4,7 @@ CollectionInfo, CollectionStatus, CollectionType, + EdgeDefinitionOptions, GraphOptions, GraphProperties, JsonWrapper, @@ -17,6 +18,7 @@ QueryProperties, QueryTrackingConfiguration, UserInfo, + VertexCollectionOptions, ) @@ -368,3 +370,19 @@ def test_GraphOptions(): assert graph_options.satellites == ["satellite1", "satellite2"] assert graph_options.smart_graph_attribute == "region" assert graph_options.write_concern == 1 + + +def test_VertexCollectionOptions(): + options = VertexCollectionOptions( + satellites=["col1", "col2"], + ) + + assert options.satellites == ["col1", "col2"] + + +def test_EdgeDefinitionOptions(): + options = EdgeDefinitionOptions( + satellites=["col1", "col2"], + ) + + assert options.satellites == ["col1", "col2"] From 6ea9259e801bff65eff04cc72175ad95f5638e9d Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 18:02:13 +0000 Subject: [PATCH 06/25] Facilitating edge and vertex collection creation --- arangoasync/exceptions.py | 32 ++++++++++ arangoasync/graph.py | 123 ++++++++++++++++++++++++++++++++++++-- tests/test_graph.py | 43 ++++++++++++- 3 files changed, 191 insertions(+), 7 deletions(-) diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index a62e64e..1f09b6d 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -263,6 +263,26 @@ class DocumentUpdateError(ArangoServerError): """Failed to update document.""" +class 
EdgeDefinitionListError(ArangoServerError): + """Failed to retrieve edge definitions.""" + + +class EdgeDefinitionCreateError(ArangoServerError): + """Failed to create edge definition.""" + + +class EdgeDefinitionReplaceError(ArangoServerError): + """Failed to replace edge definition.""" + + +class EdgeDefinitionDeleteError(ArangoServerError): + """Failed to delete edge definition.""" + + +class EdgeListError(ArangoServerError): + """Failed to retrieve edges coming in and out of a vertex.""" + + class GraphCreateError(ArangoServerError): """Failed to create the graph.""" @@ -389,3 +409,15 @@ class UserReplaceError(ArangoServerError): class UserUpdateError(ArangoServerError): """Failed to update user.""" + + +class VertexCollectionCreateError(ArangoServerError): + """Failed to create vertex collection.""" + + +class VertexCollectionDeleteError(ArangoServerError): + """Failed to delete vertex collection.""" + + +class VertexCollectionListError(ArangoServerError): + """Failed to retrieve vertex collections.""" diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 6caea22..2104ff3 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,16 +1,26 @@ __all__ = ["Graph"] -from typing import Generic, TypeVar +from typing import Generic, Optional, Sequence, TypeVar from arangoasync.collection import EdgeCollection, VertexCollection -from arangoasync.exceptions import GraphListError +from arangoasync.exceptions import ( + EdgeDefinitionCreateError, + GraphListError, + VertexCollectionCreateError, +) from arangoasync.executor import ApiExecutor from arangoasync.request import Method, Request from arangoasync.response import Response from arangoasync.result import Result from arangoasync.serialization import Deserializer, Serializer -from arangoasync.typings import GraphProperties, Json, Jsons +from arangoasync.typings import ( + EdgeDefinitionOptions, + GraphProperties, + Json, + Jsons, + VertexCollectionOptions, +) T = TypeVar("T") # Serializer type U = 
TypeVar("U") # Deserializer loads @@ -67,7 +77,7 @@ async def properties(self) -> Result[GraphProperties]: GraphListError: If the operation fails. References: - - `get-a-graph `__ + - `get-a-graph `__ """ # noqa: E501 request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") @@ -96,6 +106,48 @@ def vertex_collection(self, name: str) -> VertexCollection[T, U, V]: doc_deserializer=self._doc_deserializer, ) + async def create_vertex_collection( + self, + name: str, + options: Optional[VertexCollectionOptions | Json] = None, + ) -> Result[VertexCollection[T, U, V]]: + """Create a vertex collection in the graph. + + Args: + name (str): Vertex collection name. + options (dict | VertexCollectionOptions | None): Extra options for + creating vertex collections. + + Returns: + VertexCollection: Vertex collection API wrapper. + + Raises: + VertexCollectionCreateError: If the operation fails. + + References: + - `add-a-vertex-collection `__ + """ # noqa: E501 + data: Json = {"collection": name} + + if options is not None: + if isinstance(options, VertexCollectionOptions): + data["options"] = options.to_dict() + else: + data["options"] = options + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._name}/vertex", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> VertexCollection[T, U, V]: + if not resp.is_success: + raise VertexCollectionCreateError(resp, request) + return self.vertex_collection(name) + + return await self._executor.execute(request, response_handler) + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: """Returns the edge collection API wrapper. 
@@ -112,3 +164,66 @@ def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: doc_serializer=self._doc_serializer, doc_deserializer=self._doc_deserializer, ) + + async def create_edge_definition( + self, + edge_collection: str, + from_vertex_collections: Sequence[str], + to_vertex_collections: Sequence[str], + options: Optional[EdgeDefinitionOptions | Json] = None, + ) -> Result[EdgeCollection[T, U, V]]: + """Create an edge definition in the graph. + + This edge definition has to contain a collection and an array of each from + and to vertex collections. + + .. code-block:: python + + { + "edge_collection": "edge_collection_name", + "from_vertex_collections": ["from_vertex_collection_name"], + "to_vertex_collections": ["to_vertex_collection_name"] + } + + Args: + edge_collection (str): Edge collection name. + from_vertex_collections (list): List of vertex collections + that can be used as the "from" vertex in edges. + to_vertex_collections (list): List of vertex collections + that can be used as the "to" vertex in edges. + options (dict | EdgeDefinitionOptions | None): Extra options for + creating edge definitions. + + Returns: + EdgeCollection: Edge collection API wrapper. + + Raises: + EdgeDefinitionCreateError: If the operation fails. 
+ + References: + - `add-an-edge-definition `__ + """ # noqa: E501 + data: Json = { + "collection": edge_collection, + "from": from_vertex_collections, + "to": to_vertex_collections, + } + + if options is not None: + if isinstance(options, VertexCollectionOptions): + data["options"] = options.to_dict() + else: + data["options"] = options + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._name}/edge", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> EdgeCollection[T, U, V]: + if not resp.is_success: + raise EdgeDefinitionCreateError(resp, request) + return self.edge_collection(edge_collection) + + return await self._executor.execute(request, response_handler) diff --git a/tests/test_graph.py b/tests/test_graph.py index 6eb01f0..5b0124a 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,7 +1,8 @@ import pytest from arangoasync.exceptions import GraphCreateError, GraphDeleteError, GraphListError -from tests.helpers import generate_graph_name +from arangoasync.typings import GraphOptions +from tests.helpers import generate_col_name, generate_graph_name @pytest.mark.asyncio @@ -40,11 +41,47 @@ async def test_graph_basic(db, bad_db): await bad_db.delete_graph(graph1_name) -async def test_graph_properties(db): +async def test_graph_properties(db, cluster, enterprise): # Create a graph name = generate_graph_name() - graph = await db.create_graph(name) + is_smart = cluster and enterprise + options = GraphOptions(number_of_shards=3) + graph = await db.create_graph(name, is_smart=is_smart, options=options) + + # Create first vertex collection + vcol_name = generate_col_name() + vcol = await graph.create_vertex_collection(vcol_name) + assert vcol.name == vcol_name # Get the properties of the graph properties = await graph.properties() assert properties.name == name + assert properties.is_smart == is_smart + assert properties.number_of_shards == options.number_of_shards + assert 
properties.orphan_collections == [vcol_name] + + # Create second vertex collection + vcol2_name = generate_col_name() + vcol2 = await graph.create_vertex_collection(vcol2_name) + assert vcol2.name == vcol2_name + properties = await graph.properties() + assert len(properties.orphan_collections) == 2 + + # Create an edge definition + edge_name = generate_col_name() + edge_col = await graph.create_edge_definition( + edge_name, + from_vertex_collections=[vcol_name], + to_vertex_collections=[vcol2_name], + ) + assert edge_col.name == edge_name + + # There should be no more orphan collections + properties = await graph.properties() + assert len(properties.orphan_collections) == 0 + assert len(properties.edge_definitions) == 1 + assert properties.edge_definitions[0]["collection"] == edge_name + assert len(properties.edge_definitions[0]["from"]) == 1 + assert properties.edge_definitions[0]["from"][0] == vcol_name + assert len(properties.edge_definitions[0]["to"]) == 1 + assert properties.edge_definitions[0]["to"][0] == vcol2_name From 526b1355f8a64381ebac90f13119784beefb755c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 19:40:06 +0000 Subject: [PATCH 07/25] Vertex collection management --- arangoasync/exceptions.py | 4 ++ arangoasync/graph.py | 88 +++++++++++++++++++++++++++++++++++++-- tests/conftest.py | 14 ++++++- tests/test_graph.py | 45 +++++++++++++++++++- 4 files changed, 144 insertions(+), 7 deletions(-) diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 1f09b6d..3024264 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -295,6 +295,10 @@ class GraphListError(ArangoServerError): """Failed to retrieve graphs.""" +class GraphPropertiesError(ArangoServerError): + """Failed to retrieve graph properties.""" + + class IndexCreateError(ArangoServerError): """Failed to create collection index.""" diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 2104ff3..3fcc4bc 100644 --- 
a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,13 +1,15 @@ __all__ = ["Graph"] -from typing import Generic, Optional, Sequence, TypeVar +from typing import Generic, List, Optional, Sequence, TypeVar from arangoasync.collection import EdgeCollection, VertexCollection from arangoasync.exceptions import ( EdgeDefinitionCreateError, - GraphListError, + GraphPropertiesError, VertexCollectionCreateError, + VertexCollectionDeleteError, + VertexCollectionListError, ) from arangoasync.executor import ApiExecutor from arangoasync.request import Method, Request @@ -74,7 +76,7 @@ async def properties(self) -> Result[GraphProperties]: GraphProperties: Properties of the graph. Raises: - GraphListError: If the operation fails. + GraphProperties: If the operation fails. References: - `get-a-graph `__ @@ -83,7 +85,7 @@ async def properties(self) -> Result[GraphProperties]: def response_handler(resp: Response) -> GraphProperties: if not resp.is_success: - raise GraphListError(resp, request) + raise GraphPropertiesError(resp, request) body = self.deserializer.loads(resp.raw_body) return GraphProperties(body["graph"]) @@ -106,6 +108,56 @@ def vertex_collection(self, name: str) -> VertexCollection[T, U, V]: doc_deserializer=self._doc_deserializer, ) + async def vertex_collections(self) -> Result[List[str]]: + """Get the names of all vertex collections in the graph. + + Returns: + list: List of vertex collection names. + + Raises: + VertexCollectionListError: If the operation fails. 
+ + References: + - `list-vertex-collections `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._name}/vertex", + ) + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise VertexCollectionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return list(sorted(set(body["collections"]))) + + return await self._executor.execute(request, response_handler) + + async def has_vertex_collection(self, name: str) -> Result[bool]: + """Check if the graph has the given vertex collection. + + Args: + name (str): Vertex collection mame. + + Returns: + bool: `True` if the graph has the vertex collection, `False` otherwise. + + Raises: + VertexCollectionListError: If the operation fails. + """ + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._name}/vertex", + ) + + def response_handler(resp: Response) -> bool: + if not resp.is_success: + raise VertexCollectionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return name in body["collections"] + + return await self._executor.execute(request, response_handler) + async def create_vertex_collection( self, name: str, @@ -148,6 +200,34 @@ def response_handler(resp: Response) -> VertexCollection[T, U, V]: return await self._executor.execute(request, response_handler) + async def delete_vertex_collection(self, name: str, purge: bool = False) -> None: + """Remove a vertex collection from the graph. + + Args: + name (str): Vertex collection name. + purge (bool): If set to `True`, the vertex collection is not just deleted + from the graph but also from the database completely. Note that you + cannot remove vertex collections that are used in one of the edge + definitions of the graph. + + Raises: + VertexCollectionDeleteError: If the operation fails. 
+ + References: + - `remove-a-vertex-collection `__ + """ # noqa: E501 + request = Request( + method=Method.DELETE, + endpoint=f"/_api/gharial/{self._name}/vertex/{name}", + params={"dropCollection": purge}, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise VertexCollectionDeleteError(resp, request) + + await self._executor.execute(request, response_handler) + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: """Returns the edge collection API wrapper. diff --git a/tests/conftest.py b/tests/conftest.py index e91a591..36d323e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,12 @@ from arangoasync.auth import Auth, JwtToken from arangoasync.client import ArangoClient from arangoasync.typings import UserInfo -from tests.helpers import generate_col_name, generate_db_name, generate_username +from tests.helpers import ( + generate_col_name, + generate_db_name, + generate_graph_name, + generate_username, +) @dataclass @@ -19,6 +24,7 @@ class GlobalData: secret: str = None token: JwtToken = None sys_db_name: str = "_system" + graph_name: str = "test_graph" username: str = generate_username() cluster: bool = False enterprise: bool = False @@ -64,6 +70,7 @@ def pytest_configure(config): global_data.token = JwtToken.generate_token(global_data.secret) global_data.cluster = config.getoption("cluster") global_data.enterprise = config.getoption("enterprise") + global_data.graph_name = generate_graph_name() async def get_db_version(): async with ArangoClient(hosts=global_data.url) as client: @@ -215,6 +222,11 @@ async def bad_db(arango_client): ) +@pytest_asyncio.fixture +def bad_graph(bad_db): + return bad_db.graph(global_data.graph_name) + + @pytest_asyncio.fixture async def doc_col(db): col_name = generate_col_name() diff --git a/tests/test_graph.py b/tests/test_graph.py index 5b0124a..98ad038 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,6 +1,14 @@ import pytest -from 
arangoasync.exceptions import GraphCreateError, GraphDeleteError, GraphListError +from arangoasync.exceptions import ( + GraphCreateError, + GraphDeleteError, + GraphListError, + GraphPropertiesError, + VertexCollectionCreateError, + VertexCollectionDeleteError, + VertexCollectionListError, +) from arangoasync.typings import GraphOptions from tests.helpers import generate_col_name, generate_graph_name @@ -41,13 +49,16 @@ async def test_graph_basic(db, bad_db): await bad_db.delete_graph(graph1_name) -async def test_graph_properties(db, cluster, enterprise): +async def test_graph_properties(db, bad_graph, cluster, enterprise): # Create a graph name = generate_graph_name() is_smart = cluster and enterprise options = GraphOptions(number_of_shards=3) graph = await db.create_graph(name, is_smart=is_smart, options=options) + with pytest.raises(GraphPropertiesError): + await bad_graph.properties() + # Create first vertex collection vcol_name = generate_col_name() vcol = await graph.create_vertex_collection(vcol_name) @@ -85,3 +96,33 @@ async def test_graph_properties(db, cluster, enterprise): assert properties.edge_definitions[0]["from"][0] == vcol_name assert len(properties.edge_definitions[0]["to"]) == 1 assert properties.edge_definitions[0]["to"][0] == vcol2_name + + +async def test_vertex_collections(db, bad_graph): + # Test errors + with pytest.raises(VertexCollectionCreateError): + await bad_graph.create_vertex_collection("bad_col") + with pytest.raises(VertexCollectionListError): + await bad_graph.vertex_collections() + with pytest.raises(VertexCollectionListError): + await bad_graph.has_vertex_collection("bad_col") + with pytest.raises(VertexCollectionDeleteError): + await bad_graph.delete_vertex_collection("bad_col") + + # Create graph + graph = await db.create_graph(generate_graph_name()) + + # Create vertex collections + names = [generate_col_name() for _ in range(3)] + cols = [await graph.create_vertex_collection(name) for name in names] + + # List vertex 
collection + col_list = await graph.vertex_collections() + assert len(col_list) == 3 + for c in cols: + assert c.name in col_list + assert await graph.has_vertex_collection(c.name) + + # Delete collections + await graph.delete_vertex_collection(names[0]) + assert await graph.has_vertex_collection(names[0]) is False From 9ccbe699e40b2dcb10c95fc62398f2b347b7a2b1 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 21:25:35 +0000 Subject: [PATCH 08/25] Edge collection management --- arangoasync/exceptions.py | 4 + arangoasync/graph.py | 180 +++++++++++++++++++++++++++++++++++++- arangoasync/typings.py | 41 +++++++++ tests/test_graph.py | 67 ++++++++++++++ 4 files changed, 290 insertions(+), 2 deletions(-) diff --git a/arangoasync/exceptions.py b/arangoasync/exceptions.py index 3024264..c4ee40a 100644 --- a/arangoasync/exceptions.py +++ b/arangoasync/exceptions.py @@ -263,6 +263,10 @@ class DocumentUpdateError(ArangoServerError): """Failed to update document.""" +class EdgeCollectionListError(ArangoServerError): + """Failed to retrieve edge collections.""" + + class EdgeDefinitionListError(ArangoServerError): """Failed to retrieve edge definitions.""" diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 3fcc4bc..edde3a2 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,11 +1,15 @@ __all__ = ["Graph"] -from typing import Generic, List, Optional, Sequence, TypeVar +from typing import Generic, List, Optional, Sequence, TypeVar, cast from arangoasync.collection import EdgeCollection, VertexCollection from arangoasync.exceptions import ( + EdgeCollectionListError, EdgeDefinitionCreateError, + EdgeDefinitionDeleteError, + EdgeDefinitionListError, + EdgeDefinitionReplaceError, GraphPropertiesError, VertexCollectionCreateError, VertexCollectionDeleteError, @@ -21,6 +25,7 @@ GraphProperties, Json, Jsons, + Params, VertexCollectionOptions, ) @@ -129,7 +134,7 @@ def response_handler(resp: Response) -> List[str]: if not resp.is_success: 
raise VertexCollectionListError(resp, request) body = self.deserializer.loads(resp.raw_body) - return list(sorted(set(body["collections"]))) + return list(sorted(body["collections"])) return await self._executor.execute(request, response_handler) @@ -245,6 +250,76 @@ def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: doc_deserializer=self._doc_deserializer, ) + async def edge_definitions(self) -> Result[Jsons]: + """Return the edge definitions from the graph. + + Returns: + list: List of edge definitions. + + Raises: + EdgeDefinitionListError: If the operation fails. + """ + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise EdgeDefinitionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + properties = GraphProperties(body["graph"]) + edge_definitions = properties.format( + GraphProperties.compatibility_formatter + )["edge_definitions"] + return cast(Jsons, edge_definitions) + + return await self._executor.execute(request, response_handler) + + async def has_edge_definition(self, name: str) -> Result[bool]: + """Check if the graph has the given edge definition. + + Returns: + bool: `True` if the graph has the edge definitions, `False` otherwise. + + Raises: + EdgeDefinitionListError: If the operation fails. + """ + request = Request(method=Method.GET, endpoint=f"/_api/gharial/{self._name}") + + def response_handler(resp: Response) -> bool: + if not resp.is_success: + raise EdgeDefinitionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return any( + edge_definition["collection"] == name + for edge_definition in body["graph"]["edgeDefinitions"] + ) + + return await self._executor.execute(request, response_handler) + + async def edge_collections(self) -> Result[List[str]]: + """Get the names of all edge collections in the graph. + + Returns: + list: List of edge collection names. 
+ + Raises: + EdgeCollectionListError: If the operation fails. + + References: + - `list-edge-collections `__ + """ # noqa: E501 + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._name}/edge", + ) + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise EdgeCollectionListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + return list(sorted(body["collections"])) + + return await self._executor.execute(request, response_handler) + async def create_edge_definition( self, edge_collection: str, @@ -307,3 +382,104 @@ def response_handler(resp: Response) -> EdgeCollection[T, U, V]: return self.edge_collection(edge_collection) return await self._executor.execute(request, response_handler) + + async def replace_edge_definition( + self, + edge_collection: str, + from_vertex_collections: Sequence[str], + to_vertex_collections: Sequence[str], + options: Optional[EdgeDefinitionOptions | Json] = None, + wait_for_sync: Optional[bool] = None, + drop_collections: Optional[bool] = None, + ) -> Result[EdgeCollection[T, U, V]]: + """Replace an edge definition. + + Args: + edge_collection (str): Edge collection name. + from_vertex_collections (list): Names of "from" vertex collections. + to_vertex_collections (list): Names of "to" vertex collections. + options (dict | EdgeDefinitionOptions | None): Extra options for + modifying collections withing this edge definition. + wait_for_sync (bool | None): If set to `True`, the operation waits for + data to be synced to disk before returning. + drop_collections (bool | None): Drop the edge collection in addition to + removing it from the graph. The collection is only dropped if it is + not used in other graphs. + + Returns: + EdgeCollection: API wrapper. + + Raises: + EdgeDefinitionReplaceError: If the operation fails. 
+ + References: + - `replace-an-edge-definition `__ + """ # noqa: E501 + data: Json = { + "collection": edge_collection, + "from": from_vertex_collections, + "to": to_vertex_collections, + } + if options is not None: + if isinstance(options, VertexCollectionOptions): + data["options"] = options.to_dict() + else: + data["options"] = options + + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if drop_collections is not None: + params["dropCollections"] = drop_collections + + request = Request( + method=Method.PUT, + endpoint=f"/_api/gharial/{self._name}/edge/{edge_collection}", + data=self.serializer.dumps(data), + params=params, + ) + + def response_handler(resp: Response) -> EdgeCollection[T, U, V]: + if resp.is_success: + return self.edge_collection(edge_collection) + raise EdgeDefinitionReplaceError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def delete_edge_definition( + self, + name: str, + purge: bool = False, + wait_for_sync: Optional[bool] = None, + ) -> None: + """Delete an edge definition from the graph. + + Args: + name (str): Edge collection name. + purge (bool): If set to `True`, the edge definition is not just removed + from the graph but the edge collection is also deleted completely + from the database. + wait_for_sync (bool | None): If set to `True`, the operation waits for + changes to be synced to disk before returning. + + Raises: + EdgeDefinitionDeleteError: If the operation fails. 
+ + References: + - `remove-an-edge-definition `__ + """ # noqa: E501 + params: Params = {"dropCollections": purge} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + + request = Request( + method=Method.DELETE, + endpoint=f"/_api/gharial/{self._name}/edge/{name}", + params=params, + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise EdgeDefinitionDeleteError(resp, request) + + await self._executor.execute(request, response_handler) diff --git a/arangoasync/typings.py b/arangoasync/typings.py index 4c0af22..280e27e 100644 --- a/arangoasync/typings.py +++ b/arangoasync/typings.py @@ -1726,6 +1726,47 @@ def edge_definitions(self) -> Jsons: def orphan_collections(self) -> List[str]: return cast(List[str], self._data.get("orphanCollections", list())) + @staticmethod + def compatibility_formatter(data: Json) -> Json: + result: Json = {} + + if "_id" in data: + result["id"] = data["_id"] + if "_key" in data: + result["key"] = data["_key"] + if "name" in data: + result["name"] = data["name"] + if "_rev" in data: + result["revision"] = data["_rev"] + if "orphanCollections" in data: + result["orphan_collection"] = data["orphanCollections"] + if "edgeDefinitions" in data: + result["edge_definitions"] = [ + { + "edge_collection": edge_definition["collection"], + "from_vertex_collections": edge_definition["from"], + "to_vertex_collections": edge_definition["to"], + } + for edge_definition in data["edgeDefinitions"] + ] + if "isSmart" in data: + result["smart"] = data["isSmart"] + if "isDisjoint" in data: + result["disjoint"] = data["isDisjoint"] + if "isSatellite" in data: + result["is_satellite"] = data["isSatellite"] + if "smartGraphAttribute" in data: + result["smart_field"] = data["smartGraphAttribute"] + if "numberOfShards" in data: + result["shard_count"] = data["numberOfShards"] + if "replicationFactor" in data: + result["replication_factor"] = data["replicationFactor"] + if "minReplicationFactor" in data: + 
result["min_replication_factor"] = data["minReplicationFactor"] + if "writeConcern" in data: + result["write_concern"] = data["writeConcern"] + return result + class GraphOptions(JsonWrapper): """Special options for graph creation. diff --git a/tests/test_graph.py b/tests/test_graph.py index 98ad038..4abda65 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,6 +1,10 @@ import pytest from arangoasync.exceptions import ( + EdgeCollectionListError, + EdgeDefinitionDeleteError, + EdgeDefinitionListError, + EdgeDefinitionReplaceError, GraphCreateError, GraphDeleteError, GraphListError, @@ -126,3 +130,66 @@ async def test_vertex_collections(db, bad_graph): # Delete collections await graph.delete_vertex_collection(names[0]) assert await graph.has_vertex_collection(names[0]) is False + + +async def test_edge_collections(db, bad_graph): + # Test errors + with pytest.raises(EdgeDefinitionListError): + await bad_graph.edge_definitions() + with pytest.raises(EdgeDefinitionListError): + await bad_graph.has_edge_definition("bad_col") + with pytest.raises(EdgeCollectionListError): + await bad_graph.edge_collections() + with pytest.raises(EdgeDefinitionReplaceError): + await bad_graph.replace_edge_definition("foo", ["bar1"], ["bar2"]) + with pytest.raises(EdgeDefinitionDeleteError): + await bad_graph.delete_edge_definition("foo") + + # Create full graph + name = generate_graph_name() + graph = await db.create_graph(name) + vcol_name = generate_col_name() + await graph.create_vertex_collection(vcol_name) + vcol2_name = generate_col_name() + await graph.create_vertex_collection(vcol2_name) + edge_name = generate_col_name() + edge_col = await graph.create_edge_definition( + edge_name, + from_vertex_collections=[vcol_name], + to_vertex_collections=[vcol2_name], + ) + assert edge_col.name == edge_name + + # List edge definitions + edge_definitions = await graph.edge_definitions() + assert len(edge_definitions) == 1 + assert "edge_collection" in edge_definitions[0] + 
assert "from_vertex_collections" in edge_definitions[0] + assert "to_vertex_collections" in edge_definitions[0] + assert await graph.has_edge_definition(edge_name) is True + assert await graph.has_edge_definition("bad_edge") is False + + edge_cols = await graph.edge_collections() + assert len(edge_cols) == 1 + assert edge_name in edge_cols + + # Replace the edge definition + new_from_collections = [vcol2_name] + new_to_collections = [vcol_name] + replaced_edge_col = await graph.replace_edge_definition( + edge_name, + from_vertex_collections=new_from_collections, + to_vertex_collections=new_to_collections, + ) + assert replaced_edge_col.name == edge_name + + # Verify the updated edge definition + edge_definitions = await graph.edge_definitions() + assert len(edge_definitions) == 1 + assert edge_definitions[0]["edge_collection"] == edge_name + assert edge_definitions[0]["from_vertex_collections"] == new_from_collections + assert edge_definitions[0]["to_vertex_collections"] == new_to_collections + + # Delete the edge definition + await graph.delete_edge_definition(edge_name) + assert await graph.has_edge_definition(edge_name) is False From aa7f0c55960f63c7b6ca3d7c467781653e00a72a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 24 May 2025 21:38:26 +0000 Subject: [PATCH 09/25] Adding cluster testcase --- tests/test_graph.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/test_graph.py b/tests/test_graph.py index 4abda65..c46e0ae 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -72,7 +72,8 @@ async def test_graph_properties(db, bad_graph, cluster, enterprise): properties = await graph.properties() assert properties.name == name assert properties.is_smart == is_smart - assert properties.number_of_shards == options.number_of_shards + if cluster: + assert properties.number_of_shards == options.number_of_shards assert properties.orphan_collections == [vcol_name] # Create second vertex collection From 
a7dd6b2e48a6223d19f29d031923724df35dfd39 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 05:25:17 +0000 Subject: [PATCH 10/25] Adding note about dictionary-like indexing --- docs/serialization.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/serialization.rst b/docs/serialization.rst index 9fe520e..b6a51df 100644 --- a/docs/serialization.rst +++ b/docs/serialization.rst @@ -80,6 +80,10 @@ that you are modeling your students data using Pydantic_. You want to be able to of a certain type, and also be able to read them back. More so, you would like to get multiple documents back using one of the formats provided by pandas_. +.. note:: + The driver assumes that the types support dictionary-like indexing, i.e. `doc["_id"]` + returns the id of the document. + **Example:** .. code-block:: python From d676262e36be4a84393eac63eba9aa8742146aae Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 06:22:38 +0000 Subject: [PATCH 11/25] Inserting and retrieving vertex documents --- arangoasync/collection.py | 160 +++++++++++++++++++++++++++++++++----- arangoasync/graph.py | 69 +++++++++++++++- tests/test_graph.py | 14 +++- 3 files changed, 223 insertions(+), 20 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index adbef25..55de900 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -6,7 +6,7 @@ ] -from typing import Any, Generic, List, Optional, Sequence, Tuple, TypeVar, cast +from typing import Any, Generic, List, Optional, Sequence, TypeVar, cast from arangoasync.cursor import Cursor from arangoasync.errno import ( @@ -75,6 +75,26 @@ def __init__( self._doc_deserializer = doc_deserializer self._id_prefix = f"{self._name}/" + @staticmethod + def get_col_name(doc: str | Json) -> str: + """Extract the collection name from the document. + + Args: + doc (str | dict): Document ID or body with "_id" field. + + Returns: + str: Collection name. 
+ + Raises: + DocumentParseError: If document ID is missing. + """ + try: + doc_id: str = doc["_id"] if isinstance(doc, dict) else doc + except KeyError: + raise DocumentParseError('field "_id" required') + else: + return doc_id.split("/", 1)[0] + def _validate_id(self, doc_id: str) -> str: """Check the collection name in the document ID. @@ -120,6 +140,9 @@ def _ensure_key_from_id(self, body: Json) -> Json: Returns: dict: Document body with "_key" field if it has "_id" field. + + Raises: + DocumentParseError: If document is malformed. """ if "_id" in body and "_key" not in body: doc_id = self._validate_id(body["_id"]) @@ -127,18 +150,11 @@ def _ensure_key_from_id(self, body: Json) -> Json: body["_key"] = doc_id[len(self._id_prefix) :] return body - def _prep_from_doc( - self, - document: str | Json, - rev: Optional[str] = None, - check_rev: bool = False, - ) -> Tuple[str, Json]: - """Prepare document ID, body and request headers before a query. + def _prep_from_doc(self, document: str | Json) -> str: + """Prepare document ID before a query. Args: document (str | dict): Document ID, key or body. - rev (str | None): Document revision. - check_rev (bool): Whether to check the revision. Returns: Document ID and request headers. @@ -149,7 +165,6 @@ def _prep_from_doc( """ if isinstance(document, dict): doc_id = self._extract_id(document) - rev = rev or document.get("_rev") elif isinstance(document, str): if "/" in document: doc_id = self._validate_id(document) @@ -158,10 +173,7 @@ def _prep_from_doc( else: raise TypeError("Document must be str or a dict") - if not check_rev or rev is None: - return doc_id, {} - else: - return doc_id, {"If-Match": rev} + return doc_id def _build_filter_conditions(self, filters: Optional[Json]) -> str: """Build filter conditions for an AQL query. 
@@ -597,7 +609,7 @@ async def get( References: - `get-a-document `__ """ # noqa: E501 - handle, _ = self._prep_from_doc(document) + handle = self._prep_from_doc(document) headers: RequestHeaders = {} if allow_dirty_read: @@ -656,7 +668,7 @@ async def has( References: - `get-a-document-header `__ """ # noqa: E501 - handle, _ = self._prep_from_doc(document) + handle = self._prep_from_doc(document) headers: RequestHeaders = {} if allow_dirty_read: @@ -742,7 +754,6 @@ async def insert( - `create-a-document `__ """ # noqa: E501 if isinstance(document, dict): - # We assume that the document deserializer works with dictionaries. document = cast(T, self._ensure_key_from_id(document)) params: Params = {} @@ -1752,6 +1763,119 @@ def graph(self) -> str: """ return self._graph + async def get( + self, + vertex: str | Json, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return a document. + + Args: + vertex (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. 
+ + References: + - `get-a-vertex `__ + """ # noqa: E501 + handle = self._prep_from_doc(vertex) + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + if if_none_match is not None: + headers["If-None-Match"] = if_none_match + + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._graph}/vertex/{handle}", + headers=headers, + ) + + def response_handler(resp: Response) -> Optional[Json]: + if resp.is_success: + data: Json = self.deserializer.loads(resp.raw_body) + return cast(Json, data["vertex"]) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND: + return None + else: + raise DocumentGetError(resp, request) + elif resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + else: + raise DocumentGetError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def insert( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + ) -> Result[Json]: + """Insert a new vertex document. + + Args: + vertex (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
+ + References: + - `create-a-vertex `__ + """ # noqa: E501 + if isinstance(vertex, dict): + vertex = cast(T, self._ensure_key_from_id(vertex)) + + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_new is not None: + params["returnNew"] = return_new + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._graph}/vertex/{self.name}", + params=params, + data=self._doc_serializer.dumps(vertex), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + data: Json = self._executor.deserialize(resp.raw_body) + return cast(Json, data["vertex"]) + msg: Optional[str] = None + if resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the collection is not " + "part of the graph." + ) + raise DocumentInsertError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + class EdgeCollection(Collection[T, U, V]): """Edge collection API wrapper. diff --git a/arangoasync/graph.py b/arangoasync/graph.py index edde3a2..a9e1acb 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -3,7 +3,7 @@ from typing import Generic, List, Optional, Sequence, TypeVar, cast -from arangoasync.collection import EdgeCollection, VertexCollection +from arangoasync.collection import Collection, EdgeCollection, VertexCollection from arangoasync.exceptions import ( EdgeCollectionListError, EdgeDefinitionCreateError, @@ -233,6 +233,73 @@ def response_handler(resp: Response) -> None: await self._executor.execute(request, response_handler) + async def vertex( + self, + vertex: str | Json, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return a document. + + Args: + vertex (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. 
+ if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. + + References: + - `get-a-vertex `__ + """ # noqa: E501 + return await self.vertex_collection(Collection.get_col_name(vertex)).get( + vertex, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def insert_vertex( + self, + collection: str, + vertex: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + ) -> Result[Json]: + """Insert a new vertex document. + + Args: + collection (str): Name of the vertex collection to insert the document into. + vertex (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + + References: + - `create-a-vertex `__ + """ # noqa: E501 + return await self.vertex_collection(collection).insert( + vertex, + wait_for_sync=wait_for_sync, + return_new=return_new, + ) + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: """Returns the edge collection API wrapper. 
diff --git a/tests/test_graph.py b/tests/test_graph.py index c46e0ae..91418dc 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -103,7 +103,7 @@ async def test_graph_properties(db, bad_graph, cluster, enterprise): assert properties.edge_definitions[0]["to"][0] == vcol2_name -async def test_vertex_collections(db, bad_graph): +async def test_vertex_collections(db, docs, bad_graph): # Test errors with pytest.raises(VertexCollectionCreateError): await bad_graph.create_vertex_collection("bad_col") @@ -132,6 +132,18 @@ async def test_vertex_collections(db, bad_graph): await graph.delete_vertex_collection(names[0]) assert await graph.has_vertex_collection(names[0]) is False + # Insert in both collections + v1_meta = await graph.insert_vertex(names[1], docs[0]) + v2_meta = await graph.insert_vertex(names[2], docs[1]) + + # Get the vertex + v1 = await graph.vertex(v1_meta) + assert v1 is not None + v2 = await graph.vertex(v2_meta["_id"]) + assert v2 is not None + v3 = await graph.vertex(f"{names[2]}/bad_id") + assert v3 is None + async def test_edge_collections(db, bad_graph): # Test errors From 0cf44f7723bb444a673707031e6b3bb5e449b1bc Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 06:36:52 +0000 Subject: [PATCH 12/25] Moving methods from StandardCollection to base Collection so they are available to other subclasses --- arangoasync/collection.py | 1438 ++++++++++++++++++------------------- 1 file changed, 719 insertions(+), 719 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 55de900..955b42d 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -473,29 +473,6 @@ def response_handler(resp: Response) -> bool: return await self._executor.execute(request, response_handler) - -class StandardCollection(Collection[T, U, V]): - """Standard collection API wrapper. - - Args: - executor (ApiExecutor): API executor. 
- name (str): Collection name - doc_serializer (Serializer): Document serializer. - doc_deserializer (Deserializer): Document deserializer. - """ - - def __init__( - self, - executor: ApiExecutor, - name: str, - doc_serializer: Serializer[T], - doc_deserializer: Deserializer[U, V], - ) -> None: - super().__init__(executor, name, doc_serializer, doc_deserializer) - - def __repr__(self) -> str: - return f"" - async def properties(self) -> Result[CollectionProperties]: """Return the full properties of the current collection. @@ -580,14 +557,14 @@ def response_handler(resp: Response) -> int: return await self._executor.execute(request, response_handler) - async def get( + async def has( self, document: str | Json, allow_dirty_read: bool = False, if_match: Optional[str] = None, if_none_match: Optional[str] = None, - ) -> Result[Optional[U]]: - """Return a document. + ) -> Result[bool]: + """Check if a document exists in the collection. Args: document (str | dict): Document ID, key or body. @@ -599,15 +576,14 @@ async def get( different revision than the given ETag. Returns: - Document or `None` if not found. + `True` if the document exists, `False` otherwise. Raises: DocumentRevisionError: If the revision is incorrect. DocumentGetError: If retrieval fails. - DocumentParseError: If the document is malformed. 
References: - - `get-a-document `__ + - `get-a-document-header `__ """ # noqa: E501 handle = self._prep_from_doc(document) @@ -620,19 +596,16 @@ async def get( headers["If-None-Match"] = if_none_match request = Request( - method=Method.GET, + method=Method.HEAD, endpoint=f"/_api/document/{handle}", headers=headers, ) - def response_handler(resp: Response) -> Optional[U]: + def response_handler(resp: Response) -> bool: if resp.is_success: - return self._doc_deserializer.loads(resp.raw_body) + return True elif resp.status_code == HTTP_NOT_FOUND: - if resp.error_code == DOCUMENT_NOT_FOUND: - return None - else: - raise DocumentGetError(resp, request) + return False elif resp.status_code == HTTP_PRECONDITION_FAILED: raise DocumentRevisionError(resp, request) else: @@ -640,227 +613,363 @@ def response_handler(resp: Response) -> Optional[U]: return await self._executor.execute(request, response_handler) - async def has( + async def get_many( self, - document: str | Json, - allow_dirty_read: bool = False, - if_match: Optional[str] = None, - if_none_match: Optional[str] = None, - ) -> Result[bool]: - """Check if a document exists in the collection. + documents: Sequence[str | T], + allow_dirty_read: Optional[bool] = None, + ignore_revs: Optional[bool] = None, + ) -> Result[V]: + """Return multiple documents ignoring any missing ones. Args: - document (str | dict): Document ID, key or body. - Document body must contain the "_id" or "_key" field. - allow_dirty_read (bool): Allow reads from followers in a cluster. - if_match (str | None): The document is returned, if it has the same - revision as the given ETag. - if_none_match (str | None): The document is returned, if it has a - different revision than the given ETag. + documents (list): List of document IDs, keys or bodies. A search document + must contain at least a value for the `_key` field. 
A value for `_rev` + may be specified to verify whether the document has the same revision + value, unless `ignoreRevs` is set to false. + allow_dirty_read (bool | None): Allow reads from followers in a cluster. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one + specified. Returns: - `True` if the document exists, `False` otherwise. + list: List of documents. Missing ones are not included. Raises: - DocumentRevisionError: If the revision is incorrect. DocumentGetError: If retrieval fails. References: - - `get-a-document-header `__ + - `get-multiple-documents `__ """ # noqa: E501 - handle = self._prep_from_doc(document) + params: Params = {"onlyget": True} + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs headers: RequestHeaders = {} - if allow_dirty_read: - headers["x-arango-allow-dirty-read"] = "true" - if if_match is not None: - headers["If-Match"] = if_match - if if_none_match is not None: - headers["If-None-Match"] = if_none_match + if allow_dirty_read is not None: + if allow_dirty_read is True: + headers["x-arango-allow-dirty-read"] = "true" + else: + headers["x-arango-allow-dirty-read"] = "false" request = Request( - method=Method.HEAD, - endpoint=f"/_api/document/{handle}", + method=Method.PUT, + endpoint=f"/_api/document/{self.name}", + params=params, headers=headers, + data=self._doc_serializer.dumps(documents), ) - def response_handler(resp: Response) -> bool: - if resp.is_success: - return True - elif resp.status_code == HTTP_NOT_FOUND: - return False - elif resp.status_code == HTTP_PRECONDITION_FAILED: - raise DocumentRevisionError(resp, request) - else: + def response_handler(resp: Response) -> V: + if not resp.is_success: raise DocumentGetError(resp, request) + return 
self._doc_deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def insert( + async def find( self, - document: T, - wait_for_sync: Optional[bool] = None, - return_new: Optional[bool] = None, - return_old: Optional[bool] = None, - silent: Optional[bool] = None, - overwrite: Optional[bool] = None, - overwrite_mode: Optional[str] = None, - keep_null: Optional[bool] = None, - merge_objects: Optional[bool] = None, - refill_index_caches: Optional[bool] = None, - version_attribute: Optional[str] = None, - ) -> Result[bool | Json]: - """Insert a new document. + filters: Optional[Json] = None, + skip: Optional[int] = None, + limit: Optional[int | str] = None, + allow_dirty_read: Optional[bool] = False, + sort: Optional[Jsons] = None, + ) -> Result[Cursor]: + """Return all documents that match the given filters. Args: - document (dict): Document to insert. If it contains the "_key" or "_id" - field, the value is used as the key of the new document (otherwise - it is auto-generated). Any "_rev" field is ignored. - wait_for_sync (bool | None): Wait until document has been synced to disk. - return_new (bool | None): Additionally return the complete new document - under the attribute `new` in the result. - return_old (bool | None): Additionally return the complete old document - under the attribute `old` in the result. Only available if the - `overwrite` option is used. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. - overwrite (bool | None): If set to `True`, operation does not fail on - duplicate key and existing document is overwritten (replace-insert). - overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** - option. May be one of "ignore", "replace", "update" or "conflict". - keep_null (bool | None): If set to `True`, fields with value None are - retained in the document. Otherwise, they are removed completely. 
- Applies only when **overwrite_mode** is set to "update" - (update-insert). - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. Applies only when - **overwrite_mode** is set to "update" (update-insert). - refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document insertions affect the edge index - or cache-enabled persistent indexes. - version_attribute (str | None): Support for simple external versioning to - document operations. Only applicable if **overwrite** is set to `True` - or **overwrite_mode** is set to "update" or "replace". + filters (dict | None): Query filters. + skip (int | None): Number of documents to skip. + limit (int | str | None): Maximum number of documents to return. + allow_dirty_read (bool): Allow reads from followers in a cluster. + sort (list | None): Document sort parameters. Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True`. + Cursor: Document cursor. Raises: - DocumentInsertError: If insertion fails. - DocumentParseError: If the document is malformed. - - References: - - `create-a-document `__ - """ # noqa: E501 - if isinstance(document, dict): - document = cast(T, self._ensure_key_from_id(document)) + DocumentGetError: If retrieval fails. + SortValidationError: If sort parameters are invalid. 
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + self._validate_sort_parameters(sort) + if not self._is_none_or_int(skip): + raise ValueError("skip parameter must be a non-negative int") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") - params: Params = {} - if wait_for_sync is not None: - params["waitForSync"] = wait_for_sync - if return_new is not None: - params["returnNew"] = return_new - if return_old is not None: - params["returnOld"] = return_old - if silent is not None: - params["silent"] = silent - if overwrite is not None: - params["overwrite"] = overwrite - if overwrite_mode is not None: - params["overwriteMode"] = overwrite_mode - if keep_null is not None: - params["keepNull"] = keep_null - if merge_objects is not None: - params["mergeObjects"] = merge_objects - if refill_index_caches is not None: - params["refillIndexCaches"] = refill_index_caches - if version_attribute is not None: - params["versionAttribute"] = version_attribute + skip = skip if skip is not None else 0 + limit = limit if limit is not None else "null" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + LIMIT {skip}, {limit} + {self._build_sort_expression(sort)} + RETURN doc + """ + bind_vars = {"@collection": self.name} + data: Json = {"query": query, "bindVars": bind_vars, "count": True} + headers: RequestHeaders = {} + if allow_dirty_read is not None: + if allow_dirty_read is True: + headers["x-arango-allow-dirty-read"] = "true" + else: + headers["x-arango-allow-dirty-read"] = "false" request = Request( method=Method.POST, - endpoint=f"/_api/document/{self._name}", - params=params, - data=self._doc_serializer.dumps(document), + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + headers=headers, ) - def response_handler(resp: Response) -> bool | Json: - if resp.is_success: - if silent is True: - return True - 
return self._executor.deserialize(resp.raw_body) - msg: Optional[str] = None - if resp.status_code == HTTP_BAD_PARAMETER: - msg = ( - "Body does not contain a valid JSON representation of " - "one document." + def response_handler(resp: Response) -> Cursor: + if not resp.is_success: + raise DocumentGetError(resp, request) + if self._executor.context == "async": + # We cannot have a cursor giving back async jobs + executor: NonAsyncExecutor = DefaultApiExecutor( + self._executor.connection ) - elif resp.status_code == HTTP_NOT_FOUND: - msg = "Collection not found." - raise DocumentInsertError(resp, request, msg) + else: + executor = cast(NonAsyncExecutor, self._executor) + return Cursor(executor, self.deserializer.loads(resp.raw_body)) return await self._executor.execute(request, response_handler) - async def update( + async def update_match( self, - document: T, - ignore_revs: Optional[bool] = None, + filters: Json, + body: T, + limit: Optional[int | str] = None, + keep_none: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, + merge_objects: Optional[bool] = None, + ) -> Result[int]: + """Update matching documents. + + Args: + filters (dict | None): Query filters. + body (dict): Full or partial document body with the updates. + limit (int | str | None): Maximum number of documents to update. + keep_none (bool | None): If set to `True`, fields with value `None` are + retained in the document. Otherwise, they are removed completely. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. + + Returns: + int: Number of documents that got updated. + + Raises: + DocumentUpdateError: If update fails. 
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f", waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + UPDATE doc WITH @body IN @@collection + OPTIONS {{ keepNull: @keep_none, mergeObjects: @merge {sync} }} + """ # noqa: E201 E202 + bind_vars = { + "@collection": self.name, + "body": body, + "keep_none": keep_none, + "merge": merge_objects, + } + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentUpdateError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def replace_match( + self, + filters: Json, + body: T, + limit: Optional[int | str] = None, + wait_for_sync: Optional[bool] = None, + ) -> Result[int]: + """Replace matching documents. + + Args: + filters (dict | None): Query filters. + body (dict): New document body. + limit (int | str | None): Maximum number of documents to replace. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + + Returns: + int: Number of documents that got replaced. + + Raises: + DocumentReplaceError: If replace fails. 
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + REPLACE doc WITH @body IN @@collection + {f"OPTIONS {{ {sync} }}" if sync else ""} + """ # noqa: E201 E202 + bind_vars = { + "@collection": self.name, + "body": body, + } + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentReplaceError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def delete_match( + self, + filters: Json, + limit: Optional[int | str] = None, + wait_for_sync: Optional[bool] = None, + ) -> Result[int]: + """Delete matching documents. + + Args: + filters (dict | None): Query filters. + limit (int | str | None): Maximum number of documents to delete. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + + Returns: + int: Number of documents that got deleted. + + Raises: + DocumentDeleteError: If delete fails. 
+ """ + if not self._is_none_or_dict(filters): + raise ValueError("filters parameter must be a dict") + if not (self._is_none_or_int(limit) or limit == "null"): + raise ValueError("limit parameter must be a non-negative int") + + sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" + query = f""" + FOR doc IN @@collection + {self._build_filter_conditions(filters)} + {f"LIMIT {limit}" if limit is not None else ""} + REMOVE doc IN @@collection + {f"OPTIONS {{ {sync} }}" if sync else ""} + """ # noqa: E201 E202 + bind_vars = {"@collection": self.name} + data = {"query": query, "bindVars": bind_vars} + + request = Request( + method=Method.POST, + endpoint="/_api/cursor", + data=self.serializer.dumps(data), + ) + + def response_handler(resp: Response) -> int: + if resp.is_success: + result = self.deserializer.loads(resp.raw_body) + return cast(int, result["extra"]["stats"]["writesExecuted"]) + raise DocumentDeleteError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def insert_many( + self, + documents: Sequence[T], wait_for_sync: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, + overwrite: Optional[bool] = None, + overwrite_mode: Optional[str] = None, keep_null: Optional[bool] = None, merge_objects: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, - if_match: Optional[str] = None, - ) -> Result[bool | Json]: - """Insert a new document. + ) -> Result[Jsons]: + """Insert multiple documents. + + Note: + If inserting a document fails, the exception is not raised but + returned as an object in the "errors" list. It is up to you to + inspect the list to determine which documents were inserted + successfully (returns document metadata) and which were not + (returns exception object). Args: - document (dict): Partial or full document with the updated values. 
- It must contain the "_key" or "_id" field. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only updated if the current revision is the one - specified. - wait_for_sync (bool | None): Wait until document has been synced to disk. + documents (list): Documents to insert. If an item contains the "_key" or + "_id" field, the value is used as the key of the new document + (otherwise it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until documents have been synced to disk. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document - under the attribute `old` in the result. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. - keep_null (bool | None): If the intention is to delete existing attributes - with the patch command, set this parameter to `False`. - merge_objects (bool | None): Controls whether objects (not arrays) are - merged if present in both the existing and the patch document. - If set to `False`, the value in the patch document overwrites the - existing document’s value. If set to `True`, objects are merged. + under the attribute `old` in the result. Only available if the + `overwrite` option is used. + silent (bool | None): If set to `True`, an empty object is returned as + response if all document operations succeed. No meta-data is returned + for the created documents. If any of the operations raises an error, + an array with the error object(s) is returned. + overwrite (bool | None): If set to `True`, operation does not fail on + duplicate key and existing document is overwritten (replace-insert). + overwrite_mode (str | None): Overwrite mode. 
Supersedes **overwrite** + option. May be one of "ignore", "replace", "update" or "conflict". + keep_null (bool | None): If set to `True`, fields with value None are + retained in the document. Otherwise, they are removed completely. + Applies only when **overwrite_mode** is set to "update" + (update-insert). + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. Applies only when + **overwrite_mode** is set to "update" (update-insert). refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document updates affect the edge index + in-memory index caches if document operations affect the edge index or cache-enabled persistent indexes. version_attribute (str | None): Support for simple external versioning to - document operations. - if_match (str | None): You can conditionally update a document based on a - target revision id by using the "if-match" HTTP header. + document operations. Only applicable if **overwrite** is set to `True` + or **overwrite_mode** is set to "update" or "replace". Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True`. + list: Documents metadata (e.g. document id, key, revision) and + errors or just errors if **silent** is set to `True`. Raises: - DocumentRevisionError: If precondition was violated. - DocumentUpdateError: If update fails. + DocumentInsertError: If insertion fails. 
References: - - `update-a-document `__ + - `create-multiple-documents `__ """ # noqa: E501 params: Params = {} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs if wait_for_sync is not None: params["waitForSync"] = wait_for_sync if return_new is not None: @@ -869,6 +978,10 @@ async def update( params["returnOld"] = return_old if silent is not None: params["silent"] = silent + if overwrite is not None: + params["overwrite"] = overwrite + if overwrite_mode is not None: + params["overwriteMode"] = overwrite_mode if keep_null is not None: params["keepNull"] = keep_null if merge_objects is not None: @@ -878,85 +991,79 @@ async def update( if version_attribute is not None: params["versionAttribute"] = version_attribute - headers: RequestHeaders = {} - if if_match is not None: - headers["If-Match"] = if_match - request = Request( - method=Method.PATCH, - endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", + method=Method.POST, + endpoint=f"/_api/document/{self.name}", + data=self._doc_serializer.dumps(documents), params=params, - headers=headers, - data=self._doc_serializer.dumps(document), ) - def response_handler(resp: Response) -> bool | Json: - if resp.is_success: - if silent is True: - return True - return self._executor.deserialize(resp.raw_body) - msg: Optional[str] = None - if resp.status_code == HTTP_PRECONDITION_FAILED: - raise DocumentRevisionError(resp, request) - elif resp.status_code == HTTP_NOT_FOUND: - msg = "Document, collection or transaction not found." 
- raise DocumentUpdateError(resp, request, msg) + def response_handler( + resp: Response, + ) -> Jsons: + if not resp.is_success: + raise DocumentInsertError(resp, request) + return self.deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def replace( + async def replace_many( self, - document: T, - ignore_revs: Optional[bool] = None, + documents: Sequence[T], wait_for_sync: Optional[bool] = None, + ignore_revs: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, - if_match: Optional[str] = None, - ) -> Result[bool | Json]: - """Replace a document. + ) -> Result[Jsons]: + """Insert multiple documents. + + Note: + If replacing a document fails, the exception is not raised but + returned as an object in the "errors" list. It is up to you to + inspect the list to determine which documents were replaced + successfully (returns document metadata) and which were not + (returns exception object). Args: - document (dict): New document. It must contain the "_key" or "_id" field. - Edge document must also have "_from" and "_to" fields. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only replaced if the current revision is the one + documents (list): New documents to replace the old ones. An item must + contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until documents have been synced to disk. + ignore_revs (bool | None): If this is set to `False`, then any `_rev` + attribute given in a body document is taken as a precondition. The + document is only replaced if the current revision is the one specified. 
- wait_for_sync (bool | None): Wait until document has been synced to disk. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. + silent (bool | None): If set to `True`, an empty object is returned as + response if all document operations succeed. No meta-data is returned + for the created documents. If any of the operations raises an error, + an array with the error object(s) is returned. refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document updates affect the edge index + in-memory index caches if document operations affect the edge index or cache-enabled persistent indexes. version_attribute (str | None): Support for simple external versioning to document operations. - if_match (str | None): You can conditionally replace a document based on a - target revision id by using the "if-match" HTTP header. Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True`. + list: Documents metadata (e.g. document id, key, revision) and + errors or just errors if **silent** is set to `True`. Raises: - DocumentRevisionError: If precondition was violated. - DocumentReplaceError: If replace fails. + DocumentReplaceError: If replacing fails. 
References: - - `replace-a-document `__ + - `replace-multiple-documents `__ """ # noqa: E501 params: Params = {} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs if wait_for_sync is not None: params["waitForSync"] = wait_for_sync + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs if return_new is not None: params["returnNew"] = return_new if return_old is not None: @@ -968,411 +1075,279 @@ async def replace( if version_attribute is not None: params["versionAttribute"] = version_attribute - headers: RequestHeaders = {} - if if_match is not None: - headers["If-Match"] = if_match - request = Request( method=Method.PUT, - endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", + endpoint=f"/_api/document/{self.name}", + data=self._doc_serializer.dumps(documents), params=params, - headers=headers, - data=self._doc_serializer.dumps(document), ) - def response_handler(resp: Response) -> bool | Json: - if resp.is_success: - if silent is True: - return True - return self._executor.deserialize(resp.raw_body) - msg: Optional[str] = None - if resp.status_code == HTTP_PRECONDITION_FAILED: - raise DocumentRevisionError(resp, request) - elif resp.status_code == HTTP_NOT_FOUND: - msg = "Document, collection or transaction not found." 
- raise DocumentReplaceError(resp, request, msg) + def response_handler( + resp: Response, + ) -> Jsons: + if not resp.is_success: + raise DocumentReplaceError(resp, request) + return self.deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def delete( + async def update_many( self, - document: T, - ignore_revs: Optional[bool] = None, - ignore_missing: bool = False, + documents: Sequence[T], wait_for_sync: Optional[bool] = None, + ignore_revs: Optional[bool] = None, + return_new: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, refill_index_caches: Optional[bool] = None, - if_match: Optional[str] = None, - ) -> Result[bool | Json]: - """Delete a document. + version_attribute: Optional[str] = None, + ) -> Result[Jsons]: + """Insert multiple documents. + + Note: + If updating a document fails, the exception is not raised but + returned as an object in the "errors" list. It is up to you to + inspect the list to determine which documents were updated + successfully (returned as document metadata) and which were not + (returned as exception object). Args: - document (dict): Document ID, key or body. The body must contain the - "_key" or "_id" field. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only replaced if the current revision is the one + documents (list): Documents to update. An item must contain the "_key" or + "_id" field. + wait_for_sync (bool | None): Wait until documents have been synced to disk. + ignore_revs (bool | None): If this is set to `False`, then any `_rev` + attribute given in a body document is taken as a precondition. The + document is only updated if the current revision is the one specified. 
- ignore_missing (bool): Do not raise an exception on missing document. - This parameter has no effect in transactions where an exception is - always raised on failures. - wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, no document metadata is returned. - This can be used to save resources. + silent (bool | None): If set to `True`, an empty object is returned as + response if all document operations succeed. No meta-data is returned + for the created documents. If any of the operations raises an error, + an array with the error object(s) is returned. + keep_null (bool | None): If set to `True`, fields with value None are + retained in the document. Otherwise, they are removed completely. + Applies only when **overwrite_mode** is set to "update" + (update-insert). + merge_objects (bool | None): If set to `True`, sub-dictionaries are merged + instead of the new one overwriting the old one. Applies only when + **overwrite_mode** is set to "update" (update-insert). refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document updates affect the edge index + in-memory index caches if document operations affect the edge index or cache-enabled persistent indexes. - if_match (bool | None): You can conditionally remove a document based - on a target revision id by using the "if-match" HTTP header. + version_attribute (str | None): Support for simple external versioning to + document operations. Returns: - bool | dict: Document metadata (e.g. document id, key, revision) or `True` - if **silent** is set to `True` and the document was found. + list: Documents metadata (e.g. 
document id, key, revision) and + errors or just errors if **silent** is set to `True`. Raises: - DocumentRevisionError: If precondition was violated. - DocumentDeleteError: If deletion fails. + DocumentUpdateError: If update fails. References: - - `remove-a-document `__ + - `update-multiple-documents `__ """ # noqa: E501 params: Params = {} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs if wait_for_sync is not None: params["waitForSync"] = wait_for_sync + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs + if return_new is not None: + params["returnNew"] = return_new if return_old is not None: params["returnOld"] = return_old if silent is not None: params["silent"] = silent + if keep_null is not None: + params["keepNull"] = keep_null + if merge_objects is not None: + params["mergeObjects"] = merge_objects if refill_index_caches is not None: params["refillIndexCaches"] = refill_index_caches - - headers: RequestHeaders = {} - if if_match is not None: - headers["If-Match"] = if_match - - request = Request( - method=Method.DELETE, - endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", - params=params, - headers=headers, - ) - - def response_handler(resp: Response) -> bool | Json: - if resp.is_success: - if silent is True: - return True - return self._executor.deserialize(resp.raw_body) - msg: Optional[str] = None - if resp.status_code == HTTP_PRECONDITION_FAILED: - raise DocumentRevisionError(resp, request) - elif resp.status_code == HTTP_NOT_FOUND: - if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing: - return False - msg = "Document, collection or transaction not found." - raise DocumentDeleteError(resp, request, msg) - - return await self._executor.execute(request, response_handler) - - async def get_many( - self, - documents: Sequence[str | T], - allow_dirty_read: Optional[bool] = None, - ignore_revs: Optional[bool] = None, - ) -> Result[V]: - """Return multiple documents ignoring any missing ones. 
- - Args: - documents (list): List of document IDs, keys or bodies. A search document - must contain at least a value for the `_key` field. A value for `_rev` - may be specified to verify whether the document has the same revision - value, unless `ignoreRevs` is set to false. - allow_dirty_read (bool | None): Allow reads from followers in a cluster. - ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the - document is ignored. If this is set to `False`, then the `_rev` - attribute given in the body document is taken as a precondition. - The document is only replaced if the current revision is the one - specified. - - Returns: - list: List of documents. Missing ones are not included. - - Raises: - DocumentGetError: If retrieval fails. - - References: - - `get-multiple-documents `__ - """ # noqa: E501 - params: Params = {"onlyget": True} - if ignore_revs is not None: - params["ignoreRevs"] = ignore_revs - - headers: RequestHeaders = {} - if allow_dirty_read is not None: - if allow_dirty_read is True: - headers["x-arango-allow-dirty-read"] = "true" - else: - headers["x-arango-allow-dirty-read"] = "false" + if version_attribute is not None: + params["versionAttribute"] = version_attribute request = Request( - method=Method.PUT, + method=Method.PATCH, endpoint=f"/_api/document/{self.name}", - params=params, - headers=headers, data=self._doc_serializer.dumps(documents), + params=params, ) - def response_handler(resp: Response) -> V: - if not resp.is_success: - raise DocumentGetError(resp, request) - return self._doc_deserializer.loads_many(resp.raw_body) - - return await self._executor.execute(request, response_handler) - - async def find( - self, - filters: Optional[Json] = None, - skip: Optional[int] = None, - limit: Optional[int | str] = None, - allow_dirty_read: Optional[bool] = False, - sort: Optional[Jsons] = None, - ) -> Result[Cursor]: - """Return all documents that match the given filters. - - Args: - filters (dict | None): Query filters. 
- skip (int | None): Number of documents to skip. - limit (int | str | None): Maximum number of documents to return. - allow_dirty_read (bool): Allow reads from followers in a cluster. - sort (list | None): Document sort parameters. - - Returns: - Cursor: Document cursor. - - Raises: - DocumentGetError: If retrieval fails. - SortValidationError: If sort parameters are invalid. - """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - self._validate_sort_parameters(sort) - if not self._is_none_or_int(skip): - raise ValueError("skip parameter must be a non-negative int") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") - - skip = skip if skip is not None else 0 - limit = limit if limit is not None else "null" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - LIMIT {skip}, {limit} - {self._build_sort_expression(sort)} - RETURN doc - """ - bind_vars = {"@collection": self.name} - data: Json = {"query": query, "bindVars": bind_vars, "count": True} - headers: RequestHeaders = {} - if allow_dirty_read is not None: - if allow_dirty_read is True: - headers["x-arango-allow-dirty-read"] = "true" - else: - headers["x-arango-allow-dirty-read"] = "false" - - request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), - headers=headers, - ) - - def response_handler(resp: Response) -> Cursor: + def response_handler( + resp: Response, + ) -> Jsons: if not resp.is_success: - raise DocumentGetError(resp, request) - if self._executor.context == "async": - # We cannot have a cursor giving back async jobs - executor: NonAsyncExecutor = DefaultApiExecutor( - self._executor.connection - ) - else: - executor = cast(NonAsyncExecutor, self._executor) - return Cursor(executor, self.deserializer.loads(resp.raw_body)) - - return await self._executor.execute(request, response_handler) - - async def 
update_match( - self, - filters: Json, - body: T, - limit: Optional[int | str] = None, - keep_none: Optional[bool] = None, - wait_for_sync: Optional[bool] = None, - merge_objects: Optional[bool] = None, - ) -> Result[int]: - """Update matching documents. - - Args: - filters (dict | None): Query filters. - body (dict): Full or partial document body with the updates. - limit (int | str | None): Maximum number of documents to update. - keep_none (bool | None): If set to `True`, fields with value `None` are - retained in the document. Otherwise, they are removed completely. - wait_for_sync (bool | None): Wait until operation has been synced to disk. - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. - - Returns: - int: Number of documents that got updated. - - Raises: - DocumentUpdateError: If update fails. - """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") - - sync = f", waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - {f"LIMIT {limit}" if limit is not None else ""} - UPDATE doc WITH @body IN @@collection - OPTIONS {{ keepNull: @keep_none, mergeObjects: @merge {sync} }} - """ # noqa: E201 E202 - bind_vars = { - "@collection": self.name, - "body": body, - "keep_none": keep_none, - "merge": merge_objects, - } - data = {"query": query, "bindVars": bind_vars} - - request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), - ) - - def response_handler(resp: Response) -> int: - if resp.is_success: - result = self.deserializer.loads(resp.raw_body) - return cast(int, result["extra"]["stats"]["writesExecuted"]) - raise DocumentUpdateError(resp, request) + raise 
DocumentUpdateError(resp, request) + return self.deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def replace_match( + async def delete_many( self, - filters: Json, - body: T, - limit: Optional[int | str] = None, + documents: Sequence[T], wait_for_sync: Optional[bool] = None, - ) -> Result[int]: - """Replace matching documents. + ignore_revs: Optional[bool] = None, + return_old: Optional[bool] = None, + silent: Optional[bool] = None, + refill_index_caches: Optional[bool] = None, + ) -> Result[Jsons]: + """Delete multiple documents. - Args: - filters (dict | None): Query filters. - body (dict): New document body. - limit (int | str | None): Maximum number of documents to replace. - wait_for_sync (bool | None): Wait until operation has been synced to disk. + Note: + If deleting a document fails, the exception is not raised but + returned as an object in the "errors" list. It is up to you to + inspect the list to determine which documents were deleted + successfully (returned as document metadata) and which were not + (returned as exception object). + + Args: + documents (list): Documents to delete. An item must contain the "_key" or + "_id" field. + wait_for_sync (bool | None): Wait until documents have been synced to disk. + ignore_revs (bool | None): If this is set to `False`, then any `_rev` + attribute given in a body document is taken as a precondition. The + document is only updated if the current revision is the one + specified. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + silent (bool | None): If set to `True`, an empty object is returned as + response if all document operations succeed. No meta-data is returned + for the created documents. If any of the operations raises an error, + an array with the error object(s) is returned. 
+ refill_index_caches (bool | None): Whether to add new entries to + in-memory index caches if document operations affect the edge index + or cache-enabled persistent indexes. Returns: - int: Number of documents that got replaced. + list: Documents metadata (e.g. document id, key, revision) and + errors or just errors if **silent** is set to `True`. Raises: - DocumentReplaceError: If replace fails. - """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") + DocumentDeleteError: If deletion fails. - sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - {f"LIMIT {limit}" if limit is not None else ""} - REPLACE doc WITH @body IN @@collection - {f"OPTIONS {{ {sync} }}" if sync else ""} - """ # noqa: E201 E202 - bind_vars = { - "@collection": self.name, - "body": body, - } - data = {"query": query, "bindVars": bind_vars} + References: + - `remove-multiple-documents `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if ignore_revs is not None: + params["ignoreRevs"] = ignore_revs + if return_old is not None: + params["returnOld"] = return_old + if silent is not None: + params["silent"] = silent + if refill_index_caches is not None: + params["refillIndexCaches"] = refill_index_caches request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), + method=Method.DELETE, + endpoint=f"/_api/document/{self.name}", + data=self._doc_serializer.dumps(documents), + params=params, ) - def response_handler(resp: Response) -> int: - if resp.is_success: - result = self.deserializer.loads(resp.raw_body) - return cast(int, result["extra"]["stats"]["writesExecuted"]) - raise DocumentReplaceError(resp, request)
+ def response_handler( + resp: Response, + ) -> Jsons: + if not resp.is_success: + raise DocumentDeleteError(resp, request) + return self.deserializer.loads_many(resp.raw_body) return await self._executor.execute(request, response_handler) - async def delete_match( + +class StandardCollection(Collection[T, U, V]): + """Standard collection API wrapper. + + Args: + executor (ApiExecutor): API executor. + name (str): Collection name + doc_serializer (Serializer): Document serializer. + doc_deserializer (Deserializer): Document deserializer. + """ + + def __init__( self, - filters: Json, - limit: Optional[int | str] = None, - wait_for_sync: Optional[bool] = None, - ) -> Result[int]: - """Delete matching documents. + executor: ApiExecutor, + name: str, + doc_serializer: Serializer[T], + doc_deserializer: Deserializer[U, V], + ) -> None: + super().__init__(executor, name, doc_serializer, doc_deserializer) + + def __repr__(self) -> str: + return f"" + + async def get( + self, + document: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[U]]: + """Return a document. Args: - filters (dict | None): Query filters. - limit (int | str | None): Maximum number of documents to delete. - wait_for_sync (bool | None): Wait until operation has been synced to disk. + document (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. Returns: - int: Number of documents that got deleted. + Document or `None` if not found. Raises: - DocumentDeleteError: If delete fails. 
- """ - if not self._is_none_or_dict(filters): - raise ValueError("filters parameter must be a dict") - if not (self._is_none_or_int(limit) or limit == "null"): - raise ValueError("limit parameter must be a non-negative int") + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. - sync = f"waitForSync: {wait_for_sync}" if wait_for_sync is not None else "" - query = f""" - FOR doc IN @@collection - {self._build_filter_conditions(filters)} - {f"LIMIT {limit}" if limit is not None else ""} - REMOVE doc IN @@collection - {f"OPTIONS {{ {sync} }}" if sync else ""} - """ # noqa: E201 E202 - bind_vars = {"@collection": self.name} - data = {"query": query, "bindVars": bind_vars} + References: + - `get-a-document `__ + """ # noqa: E501 + handle = self._prep_from_doc(document) + + headers: RequestHeaders = {} + if allow_dirty_read: + headers["x-arango-allow-dirty-read"] = "true" + if if_match is not None: + headers["If-Match"] = if_match + if if_none_match is not None: + headers["If-None-Match"] = if_none_match request = Request( - method=Method.POST, - endpoint="/_api/cursor", - data=self.serializer.dumps(data), + method=Method.GET, + endpoint=f"/_api/document/{handle}", + headers=headers, ) - def response_handler(resp: Response) -> int: + def response_handler(resp: Response) -> Optional[U]: if resp.is_success: - result = self.deserializer.loads(resp.raw_body) - return cast(int, result["extra"]["stats"]["writesExecuted"]) - raise DocumentDeleteError(resp, request) + return self._doc_deserializer.loads(resp.raw_body) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND: + return None + else: + raise DocumentGetError(resp, request) + elif resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + else: + raise DocumentGetError(resp, request) return await self._executor.execute(request, response_handler) - async def 
insert_many( + async def insert( self, - documents: Sequence[T], + document: T, wait_for_sync: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, @@ -1383,30 +1358,21 @@ async def insert_many( merge_objects: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If inserting a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were inserted - successfully (returns document metadata) and which were not - (returns exception object). + ) -> Result[bool | Json]: + """Insert a new document. Args: - documents (list): Documents to insert. If an item contains the "_key" or - "_id" field, the value is used as the key of the new document - (otherwise it is auto-generated). Any "_rev" field is ignored. - wait_for_sync (bool | None): Wait until documents have been synced to disk. + document (dict): Document to insert. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. Only available if the `overwrite` option is used. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. 
overwrite (bool | None): If set to `True`, operation does not fail on duplicate key and existing document is overwritten (replace-insert). overwrite_mode (str | None): Overwrite mode. Supersedes **overwrite** @@ -1419,22 +1385,26 @@ async def insert_many( instead of the new one overwriting the old one. Applies only when **overwrite_mode** is set to "update" (update-insert). refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index + in-memory index caches if document insertions affect the edge index or cache-enabled persistent indexes. version_attribute (str | None): Support for simple external versioning to document operations. Only applicable if **overwrite** is set to `True` or **overwrite_mode** is set to "update" or "replace". Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. Raises: DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
References: - - `create-multiple-documents `__ + - `create-a-document `__ """ # noqa: E501 + if isinstance(document, dict): + document = cast(T, self._ensure_key_from_id(document)) + params: Params = {} if wait_for_sync is not None: params["waitForSync"] = wait_for_sync @@ -1459,252 +1429,271 @@ async def insert_many( request = Request( method=Method.POST, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + endpoint=f"/_api/document/{self._name}", params=params, + data=self._doc_serializer.dumps(document), ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentInsertError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_BAD_PARAMETER: + msg = ( + "Body does not contain a valid JSON representation of " + "one document." + ) + elif resp.status_code == HTTP_NOT_FOUND: + msg = "Collection not found." + raise DocumentInsertError(resp, request, msg) return await self._executor.execute(request, response_handler) - async def replace_many( + async def update( self, - documents: Sequence[T], - wait_for_sync: Optional[bool] = None, + document: T, ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, + keep_null: Optional[bool] = None, + merge_objects: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If replacing a document fails, the exception is not raised but - returned as an object in the "errors" list. 
It is up to you to - inspect the list to determine which documents were replaced - successfully (returns document metadata) and which were not - (returns exception object). + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Update a document. Args: - documents (list): New documents to replace the old ones. An item must - contain the "_key" or "_id" field. - wait_for_sync (bool | None): Wait until documents have been synced to disk. - ignore_revs (bool | None): If this is set to `False`, then any `_rev` - attribute given in a body document is taken as a precondition. The - document is only replaced if the current revision is the one + document (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only updated if the current revision is the one specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`.
+ merge_objects (bool | None): Controls whether objects (not arrays) are + merged if present in both the existing and the patch document. + If set to `False`, the value in the patch document overwrites the + existing document’s value. If set to `True`, objects are merged. refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index + in-memory index caches if document updates affect the edge index or cache-enabled persistent indexes. version_attribute (str | None): Support for simple external versioning to document operations. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. Raises: - DocumentReplaceError: If replacing fails. + DocumentRevisionError: If precondition was violated. + DocumentUpdateError: If update fails. 
References: - - `replace-multiple-documents `__ + - `update-a-document `__ """ # noqa: E501 params: Params = {} - if wait_for_sync is not None: - params["waitForSync"] = wait_for_sync if ignore_revs is not None: params["ignoreRevs"] = ignore_revs + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync if return_new is not None: params["returnNew"] = return_new if return_old is not None: params["returnOld"] = return_old if silent is not None: params["silent"] = silent + if keep_null is not None: + params["keepNull"] = keep_null + if merge_objects is not None: + params["mergeObjects"] = merge_objects if refill_index_caches is not None: params["refillIndexCaches"] = refill_index_caches if version_attribute is not None: params["versionAttribute"] = version_attribute + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + request = Request( - method=Method.PUT, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + method=Method.PATCH, + endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", params=params, + headers=headers, + data=self._doc_serializer.dumps(document), ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentReplaceError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = "Document, collection or transaction not found." 
+ raise DocumentUpdateError(resp, request, msg) return await self._executor.execute(request, response_handler) - async def update_many( + async def replace( self, - documents: Sequence[T], - wait_for_sync: Optional[bool] = None, + document: T, ignore_revs: Optional[bool] = None, + wait_for_sync: Optional[bool] = None, return_new: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, - keep_null: Optional[bool] = None, - merge_objects: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, - ) -> Result[Jsons]: - """Insert multiple documents. - - Note: - If updating a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were updated - successfully (returned as document metadata) and which were not - (returned as exception object). + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Replace a document. Args: - documents (list): Documents to update. An item must contain the "_key" or - "_id" field. - wait_for_sync (bool | None): Wait until documents have been synced to disk. - ignore_revs (bool | None): If this is set to `False`, then any `_rev` - attribute given in a body document is taken as a precondition. The - document is only updated if the current revision is the one + document (dict): New document. It must contain the "_key" or "_id" field. + Edge document must also have "_from" and "_to" fields. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one specified. + wait_for_sync (bool | None): Wait until document has been synced to disk. 
return_new (bool | None): Additionally return the complete new document under the attribute `new` in the result. return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. - keep_null (bool | None): If set to `True`, fields with value None are - retained in the document. Otherwise, they are removed completely. - Applies only when **overwrite_mode** is set to "update" - (update-insert). - merge_objects (bool | None): If set to `True`, sub-dictionaries are merged - instead of the new one overwriting the old one. Applies only when - **overwrite_mode** is set to "update" (update-insert). + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index + in-memory index caches if document updates affect the edge index or cache-enabled persistent indexes. version_attribute (str | None): Support for simple external versioning to document operations. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True`. Raises: - DocumentUpdateError: If update fails. + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. 
References: - - `update-multiple-documents `__ + - `replace-a-document `__ """ # noqa: E501 params: Params = {} - if wait_for_sync is not None: - params["waitForSync"] = wait_for_sync if ignore_revs is not None: params["ignoreRevs"] = ignore_revs + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync if return_new is not None: params["returnNew"] = return_new if return_old is not None: params["returnOld"] = return_old if silent is not None: params["silent"] = silent - if keep_null is not None: - params["keepNull"] = keep_null - if merge_objects is not None: - params["mergeObjects"] = merge_objects if refill_index_caches is not None: params["refillIndexCaches"] = refill_index_caches if version_attribute is not None: params["versionAttribute"] = version_attribute + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + request = Request( - method=Method.PATCH, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + method=Method.PUT, + endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", params=params, + headers=headers, + data=self._doc_serializer.dumps(document), ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentUpdateError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = "Document, collection or transaction not found." 
+ raise DocumentReplaceError(resp, request, msg) return await self._executor.execute(request, response_handler) - async def delete_many( + async def delete( self, - documents: Sequence[T], - wait_for_sync: Optional[bool] = None, + document: T, ignore_revs: Optional[bool] = None, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, return_old: Optional[bool] = None, silent: Optional[bool] = None, refill_index_caches: Optional[bool] = None, - ) -> Result[Jsons]: - """Delete multiple documents. - - Note: - If deleting a document fails, the exception is not raised but - returned as an object in the "errors" list. It is up to you to - inspect the list to determine which documents were deleted - successfully (returned as document metadata) and which were not - (returned as exception object). + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete a document. Args: - documents (list): Documents to delete. An item must contain the "_key" or - "_id" field. - wait_for_sync (bool | None): Wait until documents have been synced to disk. - ignore_revs (bool | None): If this is set to `False`, then any `_rev` - attribute given in a body document is taken as a precondition. The - document is only updated if the current revision is the one + document (dict): Document ID, key or body. The body must contain the + "_key" or "_id" field. + ignore_revs (bool | None): If set to `True`, the `_rev` attribute in the + document is ignored. If this is set to `False`, then the `_rev` + attribute given in the body document is taken as a precondition. + The document is only replaced if the current revision is the one specified. + ignore_missing (bool): Do not raise an exception on missing document. + This parameter has no effect in transactions where an exception is + always raised on failures. + wait_for_sync (bool | None): Wait until operation has been synced to disk. 
return_old (bool | None): Additionally return the complete old document under the attribute `old` in the result. - silent (bool | None): If set to `True`, an empty object is returned as - response if all document operations succeed. No meta-data is returned - for the created documents. If any of the operations raises an error, - an array with the error object(s) is returned. + silent (bool | None): If set to `True`, no document metadata is returned. + This can be used to save resources. refill_index_caches (bool | None): Whether to add new entries to - in-memory index caches if document operations affect the edge index + in-memory index caches if document updates affect the edge index or cache-enabled persistent indexes. + if_match (str | None): You can conditionally remove a document based + on a target revision id by using the "if-match" HTTP header. Returns: - list: Documents metadata (e.g. document id, key, revision) and - errors or just errors if **silent** is set to `True`. + bool | dict: Document metadata (e.g. document id, key, revision) or `True` + if **silent** is set to `True` and the document was found. Raises: - DocumentRemoveError: If removal fails. + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails.
References: - - `remove-multiple-documents `__ + - `remove-a-document `__ """ # noqa: E501 params: Params = {} - if wait_for_sync is not None: - params["waitForSync"] = wait_for_sync if ignore_revs is not None: params["ignoreRevs"] = ignore_revs + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync if return_old is not None: params["returnOld"] = return_old if silent is not None: @@ -1712,19 +1701,30 @@ async def delete_many( if refill_index_caches is not None: params["refillIndexCaches"] = refill_index_caches + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + request = Request( method=Method.DELETE, - endpoint=f"/_api/document/{self.name}", - data=self._doc_serializer.dumps(documents), + endpoint=f"/_api/document/{self._extract_id(cast(Json, document))}", params=params, + headers=headers, ) - def response_handler( - resp: Response, - ) -> Jsons: - if not resp.is_success: - raise DocumentDeleteError(resp, request) - return self.deserializer.loads_many(resp.raw_body) + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + if silent is True: + return True + return self._executor.deserialize(resp.raw_body) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing: + return False + msg = "Document, collection or transaction not found." 
+ raise DocumentDeleteError(resp, request, msg) return await self._executor.execute(request, response_handler) From d969ce63c50572daf479f54d76cafceececbfcd2 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 08:29:48 +0000 Subject: [PATCH 13/25] Adding CRUD for vertex collections --- arangoasync/collection.py | 263 +++++++++++++++++++++++++++++++++++++- arangoasync/graph.py | 128 +++++++++++++++++++ tests/test_graph.py | 32 ++++- 3 files changed, 416 insertions(+), 7 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 955b42d..d95f2cf 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -89,7 +89,7 @@ def get_col_name(doc: str | Json) -> str: DocumentParseError: If document ID is missing. """ try: - doc_id: str = doc["_id"] if isinstance(doc, dict) else doc + doc_id: str = doc if isinstance(doc, str) else doc["_id"] except KeyError: raise DocumentParseError('field "_id" required') else: @@ -1754,6 +1754,27 @@ def __init__( def __repr__(self) -> str: return f"" + @staticmethod + def _parse_result(data: Json) -> Json: + """Parse the result from the response. + + Args: + data (dict): Response data. + + Returns: + dict: Parsed result. + """ + result: Json = {} + if "new" in data or "old" in data: + result["vertex"] = data["vertex"] + if "new" in data: + result["new"] = data["new"] + if "old" in data: + result["old"] = data["old"] + else: + result = data["vertex"] + return result + @property def graph(self) -> str: """Return the graph name. @@ -1766,6 +1787,7 @@ def graph(self) -> str: async def get( self, vertex: str | Json, + rev: Optional[str] = None, if_match: Optional[str] = None, if_none_match: Optional[str] = None, ) -> Result[Optional[Json]]: @@ -1774,13 +1796,15 @@ async def get( Args: vertex (str | dict): Document ID, key or body. Document body must contain the "_id" or "_key" field. + rev (str | None): If this is set a document is only returned if it + has exactly this revision. 
if_match (str | None): The document is returned, if it has the same revision as the given ETag. if_none_match (str | None): The document is returned, if it has a different revision than the given ETag. Returns: - Document or `None` if not found. + dict | None: Document or `None` if not found. Raises: DocumentRevisionError: If the revision is incorrect. @@ -1798,16 +1822,20 @@ async def get( if if_none_match is not None: headers["If-None-Match"] = if_none_match + params: Params = {} + if rev is not None: + params["rev"] = rev + request = Request( method=Method.GET, endpoint=f"/_api/gharial/{self._graph}/vertex/{handle}", headers=headers, + params=params, ) def response_handler(resp: Response) -> Optional[Json]: if resp.is_success: - data: Json = self.deserializer.loads(resp.raw_body) - return cast(Json, data["vertex"]) + return self._parse_result(self.deserializer.loads(resp.raw_body)) elif resp.status_code == HTTP_NOT_FOUND: if resp.error_code == DOCUMENT_NOT_FOUND: return None @@ -1838,6 +1866,8 @@ async def insert( Returns: dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "vertex" field and the new document in the "new" field. Raises: DocumentInsertError: If insertion fails. 
@@ -1864,8 +1894,7 @@ async def insert( def response_handler(resp: Response) -> Json: if resp.is_success: - data: Json = self._executor.deserialize(resp.raw_body) - return cast(Json, data["vertex"]) + return self._parse_result(self.deserializer.loads(resp.raw_body)) msg: Optional[str] = None if resp.status_code == HTTP_NOT_FOUND: msg = ( @@ -1876,6 +1905,228 @@ def response_handler(resp: Response) -> Json: return await self._executor.execute(request, response_handler) + async def update( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Insert a new document. + + Args: + vertex (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentUpdateError: If update fails. 
+ + References: + - `update-a-vertex `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PATCH, + endpoint=f"/_api/gharial/{self._graph}/vertex/" + f"{self._prep_from_doc(cast(Json, vertex))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(vertex), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." + ) + raise DocumentUpdateError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def replace( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace a document. + + Args: + vertex (dict): New document. It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. 
+ return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. + + References: + - `replace-a-vertex `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PUT, + endpoint=f"/_api/gharial/{self._graph}/vertex/" + f"{self._prep_from_doc(cast(Json, vertex))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(vertex), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." 
+ ) + raise DocumentReplaceError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def delete( + self, + vertex: T, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete a document. + + Args: + vertex (dict): Document ID, key or body. The body must contain the + "_key" or "_id" field. + ignore_missing (bool): Do not raise an exception on missing document. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: `True` if vertex was deleted successfully, `False` if vertex + was not found and **ignore_missing** was set to `True` (does not apply in + transactions). Old document is returned if **return_old** is set to + `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. 
+ + References: + - `remove-a-vertex `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.DELETE, + endpoint=f"/_api/gharial/{self._graph}/vertex/" + f"{self._prep_from_doc(cast(Json, vertex))}", + params=params, + headers=headers, + ) + + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + data: Json = self.deserializer.loads(resp.raw_body) + if "old" in data: + return cast(Json, data["old"]) + return True + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing: + return False + msg = ( + "Vertex or graph not found, or the collection is not part of " + "this graph. Error may also occur if the transaction ID is " + "unknown." + ) + raise DocumentDeleteError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + class EdgeCollection(Collection[T, U, V]): """Edge collection API wrapper. diff --git a/arangoasync/graph.py b/arangoasync/graph.py index a9e1acb..d8697a1 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -300,6 +300,134 @@ async def insert_vertex( return_new=return_new, ) + async def update_vertex( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Insert a new document. + + Args: + vertex (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. 
+ keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision). + + Raises: + DocumentUpdateError: If update fails. + + References: + - `update-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).update( + vertex, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) + + async def replace_vertex( + self, + vertex: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace a document. + + Args: + vertex (dict): New document. It must contain the "_key" or "_id" field. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: Document metadata (e.g. document id, key, revision). 
+ + Raises: + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. + + References: + - `replace-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).replace( + vertex, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) + + async def delete_vertex( + self, + vertex: T, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete a document. + + Args: + vertex (dict): Document ID, key or body. The body must contain the + "_key" or "_id" field. + ignore_missing (bool): Do not raise an exception on missing document. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: `True` if vertex was deleted successfully, `False` if vertex + was not found and **ignore_missing** was set to `True` (does not apply in + transactions). Old document is returned if **return_old** is set to + `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. + + References: + - `remove-a-vertex `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, vertex)) + return await self.vertex_collection(col).delete( + vertex, + ignore_missing=ignore_missing, + wait_for_sync=wait_for_sync, + return_old=return_old, + if_match=if_match, + ) + def edge_collection(self, name: str) -> EdgeCollection[T, U, V]: """Returns the edge collection API wrapper. 
diff --git a/tests/test_graph.py b/tests/test_graph.py index 91418dc..dd926e4 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,6 +1,7 @@ import pytest from arangoasync.exceptions import ( + DocumentDeleteError, EdgeCollectionListError, EdgeDefinitionDeleteError, EdgeDefinitionListError, @@ -134,16 +135,45 @@ async def test_vertex_collections(db, docs, bad_graph): # Insert in both collections v1_meta = await graph.insert_vertex(names[1], docs[0]) - v2_meta = await graph.insert_vertex(names[2], docs[1]) + v2_meta = await graph.insert_vertex(names[2], docs[1], return_new=True) + assert "new" in v2_meta + v2_meta = v2_meta["vertex"] # Get the vertex v1 = await graph.vertex(v1_meta) assert v1 is not None + assert v1["text"] == docs[0]["text"] v2 = await graph.vertex(v2_meta["_id"]) assert v2 is not None v3 = await graph.vertex(f"{names[2]}/bad_id") assert v3 is None + # Update one vertex + v1["text"] = "updated_text" + v1_meta = await graph.update_vertex(v1, return_new=True) + assert "new" in v1_meta + v1 = await graph.vertex(v1_meta["vertex"]) + assert v1["text"] == "updated_text" + + # Replace the other vertex + v1["text"] = "replaced_text" + v1["additional"] = "data" + v1.pop("loc") + v1_meta = await graph.replace_vertex(v1, return_old=True, return_new=True) + assert "old" in v1_meta + assert "new" in v1_meta + v1 = await graph.vertex(v1_meta["vertex"]) + assert v1["text"] == "replaced_text" + assert "additional" in v1 + assert "loc" not in v1 + + # Delete a vertex + v1 = await graph.delete_vertex(v1["_id"], return_old=True) + assert "_id" in v1 + assert await graph.delete_vertex(v1["_id"], ignore_missing=True) is False + with pytest.raises(DocumentDeleteError): + assert await graph.delete_vertex(v1["_id"]) + async def test_edge_collections(db, bad_graph): # Test errors From e56929ead9e2e749389e78e08e9fdb416e4da91b Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 08:40:12 +0000 Subject: [PATCH 14/25] Adding "has" for vertex 
collections --- arangoasync/graph.py | 32 ++++++++++++++++++++++++++++++++ tests/test_graph.py | 4 ++++ 2 files changed, 36 insertions(+) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index d8697a1..71895ab 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -233,6 +233,38 @@ def response_handler(resp: Response) -> None: await self._executor.execute(request, response_handler) + async def has_vertex( + self, + vertex: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[bool]: + """Check if the vertex exists in the graph. + + Args: + vertex (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + `True` if the document exists, `False` otherwise. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. 
+ """ # noqa: E501 + return await self.vertex_collection(Collection.get_col_name(vertex)).has( + vertex, + allow_dirty_read=allow_dirty_read, + if_match=if_match, + if_none_match=if_none_match, + ) + async def vertex( self, vertex: str | Json, diff --git a/tests/test_graph.py b/tests/test_graph.py index dd926e4..8d6b14f 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -174,6 +174,10 @@ async def test_vertex_collections(db, docs, bad_graph): with pytest.raises(DocumentDeleteError): assert await graph.delete_vertex(v1["_id"]) + # Check has method + assert await graph.has_vertex(v1) is False + assert await graph.has_vertex(v2["_id"]) is True + async def test_edge_collections(db, bad_graph): # Test errors From d3b45afbbe6fc97ff5ffb545cee7b0e350fefc60 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 09:46:38 +0000 Subject: [PATCH 15/25] Marking tests as asyncio --- tests/test_graph.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_graph.py b/tests/test_graph.py index 8d6b14f..213d9b9 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -54,6 +54,7 @@ async def test_graph_basic(db, bad_db): await bad_db.delete_graph(graph1_name) +@pytest.mark.asyncio async def test_graph_properties(db, bad_graph, cluster, enterprise): # Create a graph name = generate_graph_name() @@ -104,6 +105,7 @@ async def test_graph_properties(db, bad_graph, cluster, enterprise): assert properties.edge_definitions[0]["to"][0] == vcol2_name +@pytest.mark.asyncio async def test_vertex_collections(db, docs, bad_graph): # Test errors with pytest.raises(VertexCollectionCreateError): @@ -179,6 +181,7 @@ async def test_vertex_collections(db, docs, bad_graph): assert await graph.has_vertex(v2["_id"]) is True +@pytest.mark.asyncio async def test_edge_collections(db, bad_graph): # Test errors with pytest.raises(EdgeDefinitionListError): From d1fabd4ac92c897c18c71a664ba2a3c4a948745c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 
10:47:12 +0000 Subject: [PATCH 16/25] Inserting and retrieving edges --- arangoasync/collection.py | 162 ++++++++++++++++++++++++++++++++++++-- arangoasync/graph.py | 140 +++++++++++++++++++++++++++++--- tests/test_graph.py | 84 +++++++++++++++----- 3 files changed, 349 insertions(+), 37 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index d95f2cf..55409a0 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -1465,7 +1465,7 @@ async def update( version_attribute: Optional[str] = None, if_match: Optional[str] = None, ) -> Result[bool | Json]: - """Insert a new document. + """Update a document. Args: document (dict): Partial or full document with the updated values. @@ -1791,7 +1791,7 @@ async def get( if_match: Optional[str] = None, if_none_match: Optional[str] = None, ) -> Result[Optional[Json]]: - """Return a document. + """Return a vertex from the graph. Args: vertex (str | dict): Document ID, key or body. @@ -1914,7 +1914,7 @@ async def update( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[Json]: - """Insert a new document. + """Update a vertex in the graph. Args: vertex (dict): Partial or full document with the updated values. @@ -1989,7 +1989,7 @@ async def replace( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[Json]: - """Replace a document. + """Replace a vertex in the graph. Args: vertex (dict): New document. It must contain the "_key" or "_id" field. @@ -2063,7 +2063,7 @@ async def delete( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[bool | Json]: - """Delete a document. + """Delete a vertex from the graph. Args: vertex (dict): Document ID, key or body. The body must contain the @@ -2077,9 +2077,9 @@ async def delete( Returns: bool | dict: `True` if vertex was deleted successfully, `False` if vertex - was not found and **ignore_missing** was set to `True` (does not apply in - transactions). 
Old document is returned if **return_old** is set to - `True`. + was not found and **ignore_missing** was set to `True` (does not apply + in transactions). Old document is returned if **return_old** is set + to `True`. Raises: DocumentRevisionError: If precondition was violated. @@ -2153,6 +2153,27 @@ def __init__( def __repr__(self) -> str: return f"" + @staticmethod + def _parse_result(data: Json) -> Json: + """Parse the result from the response. + + Args: + data (dict): Response data. + + Returns: + dict: Parsed result. + """ + result: Json = {} + if "new" in data or "old" in data: + result["edge"] = data["edge"] + if "new" in data: + result["new"] = data["new"] + if "old" in data: + result["old"] = data["old"] + else: + result = data["edge"] + return result + @property def graph(self) -> str: """Return the graph name. @@ -2161,3 +2182,128 @@ def graph(self) -> str: str: Graph name. """ return self._graph + + async def get( + self, + edge: str | Json, + rev: Optional[str] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return an edge from the graph. + + Args: + edge (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + rev (str | None): If this is set a document is only returned if it + has exactly this revision. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + dict | None: Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. 
+ + References: + - `get-an-edge `__ + """ # noqa: E501 + handle = self._prep_from_doc(edge) + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + if if_none_match is not None: + headers["If-None-Match"] = if_none_match + + params: Params = {} + if rev is not None: + params["rev"] = rev + + request = Request( + method=Method.GET, + endpoint=f"/_api/gharial/{self._graph}/edge/{handle}", + headers=headers, + params=params, + ) + + def response_handler(resp: Response) -> Optional[Json]: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND: + return None + else: + raise DocumentGetError(resp, request) + elif resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + else: + raise DocumentGetError(resp, request) + + return await self._executor.execute(request, response_handler) + + async def insert( + self, + edge: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + ) -> Result[Json]: + """Insert a new edge document. + + Args: + edge (dict): Document to insert. It must contain "_from" and + "_to" fields. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
+ + References: + - `create-an-edge `__ + """ # noqa: E501 + if isinstance(edge, dict): + edge = cast(T, self._ensure_key_from_id(edge)) + + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_new is not None: + params["returnNew"] = return_new + + request = Request( + method=Method.POST, + endpoint=f"/_api/gharial/{self._graph}/edge/{self.name}", + params=params, + data=self._doc_serializer.dumps(edge), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the edge collection is not " + "part of the graph. It is also possible that the vertex " + "collection referenced in the _from or _to attribute is not part " + "of the graph or the vertex collection is part of the graph, but " + "does not exist. Finally check that _from or _to vertex do exist." + ) + raise DocumentInsertError(resp, request, msg) + + return await self._executor.execute(request, response_handler) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 71895ab..c7eda26 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -258,7 +258,8 @@ async def has_vertex( DocumentRevisionError: If the revision is incorrect. DocumentGetError: If retrieval fails. """ # noqa: E501 - return await self.vertex_collection(Collection.get_col_name(vertex)).has( + col = Collection.get_col_name(vertex) + return await self.vertex_collection(col).has( vertex, allow_dirty_read=allow_dirty_read, if_match=if_match, @@ -271,7 +272,7 @@ async def vertex( if_match: Optional[str] = None, if_none_match: Optional[str] = None, ) -> Result[Optional[Json]]: - """Return a document. + """Return a vertex document. Args: vertex (str | dict): Document ID, key or body. 
@@ -292,7 +293,8 @@ async def vertex( References: - `get-a-vertex `__ """ # noqa: E501 - return await self.vertex_collection(Collection.get_col_name(vertex)).get( + col = Collection.get_col_name(vertex) + return await self.vertex_collection(col).get( vertex, if_match=if_match, if_none_match=if_none_match, @@ -318,6 +320,8 @@ async def insert_vertex( Returns: dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "vertex" field and the new document in the "new" field. Raises: DocumentInsertError: If insertion fails. @@ -341,7 +345,7 @@ async def update_vertex( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[Json]: - """Insert a new document. + """Update a vertex in the graph. Args: vertex (dict): Partial or full document with the updated values. @@ -357,7 +361,10 @@ async def update_vertex( target revision id by using the "if-match" HTTP header. Returns: - bool | dict: Document metadata (e.g. document id, key, revision). + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). Raises: DocumentUpdateError: If update fails. @@ -384,7 +391,7 @@ async def replace_vertex( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[Json]: - """Replace a document. + """Replace a vertex in the graph. Args: vertex (dict): New document. It must contain the "_key" or "_id" field. @@ -399,7 +406,10 @@ async def replace_vertex( target revision id by using the "if-match" HTTP header. Returns: - bool | dict: Document metadata (e.g. document id, key, revision). + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). 
Raises: DocumentRevisionError: If precondition was violated. @@ -426,7 +436,7 @@ async def delete_vertex( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> Result[bool | Json]: - """Delete a document. + """Delete a vertex in the graph. Args: vertex (dict): Document ID, key or body. The body must contain the @@ -440,9 +450,9 @@ async def delete_vertex( Returns: bool | dict: `True` if vertex was deleted successfully, `False` if vertex - was not found and **ignore_missing** was set to `True` (does not apply in - transactions). Old document is returned if **return_old** is set to - `True`. + was not found and **ignore_missing** was set to `True` (does not apply + in transactions). Old document is returned if **return_old** is set + to `True`. Raises: DocumentRevisionError: If precondition was violated. @@ -710,3 +720,111 @@ def response_handler(resp: Response) -> None: raise EdgeDefinitionDeleteError(resp, request) await self._executor.execute(request, response_handler) + + async def has_edge( + self, + edge: str | Json, + allow_dirty_read: bool = False, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[bool]: + """Check if the edge exists in the graph. + + Args: + edge (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + allow_dirty_read (bool): Allow reads from followers in a cluster. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + `True` if the document exists, `False` otherwise. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. 
+ """ # noqa: E501 + col = Collection.get_col_name(edge) + return await self.edge_collection(col).has( + edge, + allow_dirty_read=allow_dirty_read, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def edge( + self, + edge: str | Json, + rev: Optional[str] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + ) -> Result[Optional[Json]]: + """Return an edge from the graph. + + Args: + edge (str | dict): Document ID, key or body. + Document body must contain the "_id" or "_key" field. + rev (str | None): If this is set a document is only returned if it + has exactly this revision. + if_match (str | None): The document is returned, if it has the same + revision as the given ETag. + if_none_match (str | None): The document is returned, if it has a + different revision than the given ETag. + + Returns: + dict | None: Document or `None` if not found. + + Raises: + DocumentRevisionError: If the revision is incorrect. + DocumentGetError: If retrieval fails. + DocumentParseError: If the document is malformed. + + References: + - `get-an-edge `__ + """ # noqa: E501 + col = Collection.get_col_name(edge) + return await self.edge_collection(col).get( + edge, + rev=rev, + if_match=if_match, + if_none_match=if_none_match, + ) + + async def insert_edge( + self, + collection: str, + edge: T, + wait_for_sync: Optional[bool] = None, + return_new: Optional[bool] = None, + ) -> Result[Json]: + """Insert a new edge document. + + Args: + collection (str): Name of the vertex collection to insert the document into. + edge (dict): Document to insert. It must contain "_from" and + "_to" fields. If it contains the "_key" or "_id" + field, the value is used as the key of the new document (otherwise + it is auto-generated). Any "_rev" field is ignored. + wait_for_sync (bool | None): Wait until document has been synced to disk. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. 
+ + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. + + References: + - `create-an-edge `__ + """ # noqa: E501 + return await self.edge_collection(collection).insert( + edge, + wait_for_sync=wait_for_sync, + return_new=return_new, + ) diff --git a/tests/test_graph.py b/tests/test_graph.py index 213d9b9..d7ff807 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -198,17 +198,19 @@ async def test_edge_collections(db, bad_graph): # Create full graph name = generate_graph_name() graph = await db.create_graph(name) - vcol_name = generate_col_name() - await graph.create_vertex_collection(vcol_name) - vcol2_name = generate_col_name() - await graph.create_vertex_collection(vcol2_name) - edge_name = generate_col_name() + teachers_col_name = generate_col_name() + await db.create_collection(teachers_col_name) + await graph.create_vertex_collection(teachers_col_name) + students_col_name = generate_col_name() + await db.create_collection(students_col_name) + await graph.create_vertex_collection(students_col_name) + edge_col_name = generate_col_name() edge_col = await graph.create_edge_definition( - edge_name, - from_vertex_collections=[vcol_name], - to_vertex_collections=[vcol2_name], + edge_col_name, + from_vertex_collections=[teachers_col_name], + to_vertex_collections=[students_col_name], ) - assert edge_col.name == edge_name + assert edge_col.name == edge_col_name # List edge definitions edge_definitions = await graph.edge_definitions() @@ -216,30 +218,76 @@ async def test_edge_collections(db, bad_graph): assert "edge_collection" in edge_definitions[0] assert "from_vertex_collections" in edge_definitions[0] assert "to_vertex_collections" in edge_definitions[0] - assert await 
graph.has_edge_definition(edge_name) is True + assert await graph.has_edge_definition(edge_col_name) is True assert await graph.has_edge_definition("bad_edge") is False edge_cols = await graph.edge_collections() assert len(edge_cols) == 1 - assert edge_name in edge_cols + assert edge_col_name in edge_cols + + # Design the graph + teachers = [ + {"_key": "101", "name": "Mr. Smith"}, + {"_key": "102", "name": "Ms. Johnson"}, + {"_key": "103", "name": "Dr. Brown"}, + ] + students = [ + {"_key": "123", "name": "Alice"}, + {"_key": "456", "name": "Bob"}, + {"_key": "789", "name": "Charlie"}, + ] + edges = [ + { + "_from": f"{teachers_col_name}/101", + "_to": f"{students_col_name}/123", + "subject": "Math", + }, + { + "_from": f"{teachers_col_name}/102", + "_to": f"{students_col_name}/456", + "subject": "Science", + }, + { + "_from": f"{teachers_col_name}/103", + "_to": f"{students_col_name}/789", + "subject": "History", + }, + ] + + # Create an edge + await graph.insert_vertex(teachers_col_name, teachers[0]) + await graph.insert_vertex(students_col_name, students[0]) + edge_meta = await graph.insert_edge( + edge_col_name, + edges[0], + return_new=True, + ) + assert "new" in edge_meta + + # Check for edge existence + edge_id = edge_meta["new"]["_id"] + assert await graph.has_edge(edge_id) is True + assert await graph.has_edge(f"{edge_col_name}/bad_id") is False + edge = await graph.edge(edge_id) + assert edge is not None # Replace the edge definition - new_from_collections = [vcol2_name] - new_to_collections = [vcol_name] + new_from_collections = [students_col_name] + new_to_collections = [teachers_col_name] replaced_edge_col = await graph.replace_edge_definition( - edge_name, + edge_col_name, from_vertex_collections=new_from_collections, to_vertex_collections=new_to_collections, ) - assert replaced_edge_col.name == edge_name + assert replaced_edge_col.name == edge_col_name # Verify the updated edge definition edge_definitions = await graph.edge_definitions() assert 
len(edge_definitions) == 1 - assert edge_definitions[0]["edge_collection"] == edge_name + assert edge_definitions[0]["edge_collection"] == edge_col_name assert edge_definitions[0]["from_vertex_collections"] == new_from_collections assert edge_definitions[0]["to_vertex_collections"] == new_to_collections # Delete the edge definition - await graph.delete_edge_definition(edge_name) - assert await graph.has_edge_definition(edge_name) is False + await graph.delete_edge_definition(edge_col_name) + assert await graph.has_edge_definition(edge_col_name) is False From 5662f37a186c0664d008e07ec3763f6d8349b1af Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 10:57:23 +0000 Subject: [PATCH 17/25] Event loop scope --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 36d323e..775f871 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -143,7 +143,7 @@ def docs(): ] -@pytest.fixture(scope="session") +@pytest_asyncio.fixture(scope="session") def event_loop(): loop = asyncio.new_event_loop() yield loop From 3eed2b2a624757bd9e1d7f3a43d9da5a2916751b Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 11:05:25 +0000 Subject: [PATCH 18/25] Event loop scope again --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 775f871..36d323e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -143,7 +143,7 @@ def docs(): ] -@pytest_asyncio.fixture(scope="session") +@pytest.fixture(scope="session") def event_loop(): loop = asyncio.new_event_loop() yield loop From a5c62780e4d3a6a660f610df73c2f04ba7149aaf Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 11:14:35 +0000 Subject: [PATCH 19/25] Updating edge --- arangoasync/collection.py | 78 +++++++++++++++++++++++++++++++++++++++ arangoasync/graph.py | 47 +++++++++++++++++++++++ tests/conftest.py | 2 +- tests/test_graph.py | 8 
++++ 4 files changed, 134 insertions(+), 1 deletion(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 55409a0..7090413 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -2307,3 +2307,81 @@ def response_handler(resp: Response) -> Json: raise DocumentInsertError(resp, request, msg) return await self._executor.execute(request, response_handler) + + async def update( + self, + edge: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Update a vertex in the graph. + + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentUpdateError: If update fails. 
+ + References: + - `update-an-edge `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PATCH, + endpoint=f"/_api/gharial/{self._graph}/edge/" + f"{self._prep_from_doc(cast(Json, edge))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(edge), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the edge collection is not " + "part of the graph. It is also possible that the vertex " + "collection referenced in the _from or _to attribute is not part " + "of the graph or the vertex collection is part of the graph, but " + "does not exist. Finally check that _from or _to vertex do exist." + ) + raise DocumentUpdateError(resp, request, msg) + + return await self._executor.execute(request, response_handler) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index c7eda26..dba49e3 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -828,3 +828,50 @@ async def insert_edge( wait_for_sync=wait_for_sync, return_new=return_new, ) + + async def update_edge( + self, + edge: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Update a vertex in the graph. 
+ + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally update a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "vertex" field and two additional fields + ("new" and "old"). + + Raises: + DocumentUpdateError: If update fails. 
+ + References: + - `update-an-edge `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, edge)) + return await self.edge_collection(col).update( + edge, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) diff --git a/tests/conftest.py b/tests/conftest.py index 36d323e..98d75de 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -245,7 +245,7 @@ def db_version(): return global_data.db_version -@pytest_asyncio.fixture(scope="session", autouse=True) +@pytest_asyncio.fixture(autouse=True) async def teardown(): yield async with ArangoClient(hosts=global_data.url) as client: diff --git a/tests/test_graph.py b/tests/test_graph.py index d7ff807..6700723 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -271,6 +271,14 @@ async def test_edge_collections(db, bad_graph): edge = await graph.edge(edge_id) assert edge is not None + # Update an edge + edge["subject"] = "Advanced Math" + updated_edge_meta = await graph.update_edge(edge, return_new=True, return_old=True) + assert "new" in updated_edge_meta + assert "old" in updated_edge_meta + edge = await graph.edge(edge_id) + assert edge["subject"] == "Advanced Math" + # Replace the edge definition new_from_collections = [students_col_name] new_to_collections = [teachers_col_name] From 5e4c0b44134e506b86350adf0916b31bba5a642d Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 29 May 2025 11:32:00 +0000 Subject: [PATCH 20/25] Edges CRUD --- arangoasync/collection.py | 155 +++++++++++++++++++++++++++++++++++++- arangoasync/graph.py | 93 ++++++++++++++++++++++- tests/test_graph.py | 40 ++++++++-- 3 files changed, 277 insertions(+), 11 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 7090413..c8f1856 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -2317,7 +2317,7 @@ async def update( return_old: Optional[bool] = None, if_match: Optional[str] = None, ) -> 
Result[Json]: - """Update a vertex in the graph. + """Update an edge in the graph. Args: edge (dict): Partial or full document with the updated values. @@ -2336,7 +2336,7 @@ async def update( Returns: dict: Document metadata (e.g. document id, key, revision). If `return_new` or "return_old" are specified, the result contains - the document metadata in the "vertex" field and two additional fields + the document metadata in the "edge" field and two additional fields ("new" and "old"). Raises: @@ -2385,3 +2385,154 @@ def response_handler(resp: Response) -> Json: raise DocumentUpdateError(resp, request, msg) return await self._executor.execute(request, response_handler) + + async def replace( + self, + edge: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace an edge in the graph. + + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "edge" field and two additional fields + ("new" and "old"). + + Raises: + DocumentRevisionError: If precondition was violated. 
+ DocumentReplaceError: If replace fails. + + References: + - `replace-an-edge `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if keep_null is not None: + params["keepNull"] = keep_null + if return_new is not None: + params["returnNew"] = return_new + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.PUT, + endpoint=f"/_api/gharial/{self._graph}/edge/" + f"{self._prep_from_doc(cast(Json, edge))}", + params=params, + headers=headers, + data=self._doc_serializer.dumps(edge), + ) + + def response_handler(resp: Response) -> Json: + if resp.is_success: + return self._parse_result(self.deserializer.loads(resp.raw_body)) + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + msg = ( + "The graph cannot be found or the edge collection is not " + "part of the graph. It is also possible that the vertex " + "collection referenced in the _from or _to attribute is not part " + "of the graph or the vertex collection is part of the graph, but " + "does not exist. Finally check that _from or _to vertex do exist." + ) + raise DocumentReplaceError(resp, request, msg) + + return await self._executor.execute(request, response_handler) + + async def delete( + self, + edge: T, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete an edge from the graph. + + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + ignore_missing (bool): Do not raise an exception on missing document. 
+ wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: `True` if vertex was deleted successfully, `False` if vertex + was not found and **ignore_missing** was set to `True` (does not apply + in transactions). Old document is returned if **return_old** is set + to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. + + References: + - `remove-an-edge `__ + """ # noqa: E501 + params: Params = {} + if wait_for_sync is not None: + params["waitForSync"] = wait_for_sync + if return_old is not None: + params["returnOld"] = return_old + + headers: RequestHeaders = {} + if if_match is not None: + headers["If-Match"] = if_match + + request = Request( + method=Method.DELETE, + endpoint=f"/_api/gharial/{self._graph}/edge/" + f"{self._prep_from_doc(cast(Json, edge))}", + params=params, + headers=headers, + ) + + def response_handler(resp: Response) -> bool | Json: + if resp.is_success: + data: Json = self.deserializer.loads(resp.raw_body) + if "old" in data: + return cast(Json, data["old"]) + return True + msg: Optional[str] = None + if resp.status_code == HTTP_PRECONDITION_FAILED: + raise DocumentRevisionError(resp, request) + elif resp.status_code == HTTP_NOT_FOUND: + if resp.error_code == DOCUMENT_NOT_FOUND and ignore_missing: + return False + msg = ( + "Either the graph cannot be found, the edge collection is not " + "part of the graph, or the edge does not exist" + ) + raise DocumentDeleteError(resp, request, msg) + + return await self._executor.execute(request, response_handler) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index dba49e3..30fde75 100644 --- a/arangoasync/graph.py +++ 
b/arangoasync/graph.py @@ -857,7 +857,7 @@ async def update_edge( Returns: dict: Document metadata (e.g. document id, key, revision). If `return_new` or "return_old" are specified, the result contains - the document metadata in the "vertex" field and two additional fields + the document metadata in the "edge" field and two additional fields ("new" and "old"). Raises: @@ -875,3 +875,94 @@ async def update_edge( return_old=return_old, if_match=if_match, ) + + async def replace_edge( + self, + edge: T, + wait_for_sync: Optional[bool] = None, + keep_null: Optional[bool] = None, + return_new: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[Json]: + """Replace an edge in the graph. + + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + wait_for_sync (bool | None): Wait until document has been synced to disk. + keep_null (bool | None): If the intention is to delete existing attributes + with the patch command, set this parameter to `False`. + return_new (bool | None): Additionally return the complete new document + under the attribute `new` in the result. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` or "return_old" are specified, the result contains + the document metadata in the "edge" field and two additional fields + ("new" and "old"). + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentReplaceError: If replace fails. 
+ + References: + - `replace-an-edge `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, edge)) + return await self.edge_collection(col).replace( + edge, + wait_for_sync=wait_for_sync, + keep_null=keep_null, + return_new=return_new, + return_old=return_old, + if_match=if_match, + ) + + async def delete_edge( + self, + edge: T, + ignore_missing: bool = False, + wait_for_sync: Optional[bool] = None, + return_old: Optional[bool] = None, + if_match: Optional[str] = None, + ) -> Result[bool | Json]: + """Delete an edge from the graph. + + Args: + edge (dict): Partial or full document with the updated values. + It must contain the "_key" or "_id" field, along with "_from" and + "_to" fields. + ignore_missing (bool): Do not raise an exception on missing document. + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_old (bool | None): Additionally return the complete old document + under the attribute `old` in the result. + if_match (str | None): You can conditionally replace a document based on a + target revision id by using the "if-match" HTTP header. + + Returns: + bool | dict: `True` if vertex was deleted successfully, `False` if vertex + was not found and **ignore_missing** was set to `True` (does not apply + in transactions). Old document is returned if **return_old** is set + to `True`. + + Raises: + DocumentRevisionError: If precondition was violated. + DocumentDeleteError: If deletion fails. 
+ + References: + - `remove-an-edge `__ + """ # noqa: E501 + col = Collection.get_col_name(cast(Json | str, edge)) + return await self.edge_collection(col).delete( + edge, + ignore_missing=ignore_missing, + wait_for_sync=wait_for_sync, + return_old=return_old, + if_match=if_match, + ) diff --git a/tests/test_graph.py b/tests/test_graph.py index 6700723..65d61d0 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -154,6 +154,7 @@ async def test_vertex_collections(db, docs, bad_graph): v1["text"] = "updated_text" v1_meta = await graph.update_vertex(v1, return_new=True) assert "new" in v1_meta + assert "vertex" in v1_meta v1 = await graph.vertex(v1_meta["vertex"]) assert v1["text"] == "updated_text" @@ -164,6 +165,7 @@ async def test_vertex_collections(db, docs, bad_graph): v1_meta = await graph.replace_vertex(v1, return_old=True, return_new=True) assert "old" in v1_meta assert "new" in v1_meta + assert "vertex" in v1_meta v1 = await graph.vertex(v1_meta["vertex"]) assert v1["text"] == "replaced_text" assert "additional" in v1 @@ -255,16 +257,20 @@ async def test_edge_collections(db, bad_graph): ] # Create an edge - await graph.insert_vertex(teachers_col_name, teachers[0]) - await graph.insert_vertex(students_col_name, students[0]) - edge_meta = await graph.insert_edge( - edge_col_name, - edges[0], - return_new=True, - ) - assert "new" in edge_meta + edge_metas = [] + for idx in range(len(edges)): + await graph.insert_vertex(teachers_col_name, teachers[idx]) + await graph.insert_vertex(students_col_name, students[idx]) + edge_meta = await graph.insert_edge( + edge_col_name, + edges[0], + return_new=True, + ) + assert "new" in edge_meta + edge_metas.append(edge_meta) # Check for edge existence + edge_meta = edge_metas[0] edge_id = edge_meta["new"]["_id"] assert await graph.has_edge(edge_id) is True assert await graph.has_edge(f"{edge_col_name}/bad_id") is False @@ -276,9 +282,27 @@ async def test_edge_collections(db, bad_graph): updated_edge_meta = await 
graph.update_edge(edge, return_new=True, return_old=True) assert "new" in updated_edge_meta assert "old" in updated_edge_meta + assert "edge" in updated_edge_meta edge = await graph.edge(edge_id) assert edge["subject"] == "Advanced Math" + # Replace an edge + edge["subject"] = "Replaced Subject" + edge["extra_info"] = "Some additional data" + replaced_edge_meta = await graph.replace_edge( + edge, return_old=True, return_new=True + ) + assert "old" in replaced_edge_meta + assert "new" in replaced_edge_meta + assert "edge" in replaced_edge_meta + edge = await graph.edge(edge_id) + assert edge["subject"] == "Replaced Subject" + + # Delete the edge + deleted_edge = await graph.delete_edge(edge_id, return_old=True) + assert "_id" in deleted_edge + assert await graph.has_edge(edge_id) is False + # Replace the edge definition new_from_collections = [students_col_name] new_to_collections = [teachers_col_name] From 00eaf49a0a90413ad79b7c8af72c580f7f95544e Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 05:33:16 +0000 Subject: [PATCH 21/25] Extra edge methods --- arangoasync/collection.py | 138 ++++++++++++++++++++++++++++++++------ arangoasync/graph.py | 73 +++++++++++++++++++- tests/test_graph.py | 79 ++++++++++++++++++++++ 3 files changed, 269 insertions(+), 21 deletions(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index c8f1856..179a4a5 100644 --- a/arangoasync/collection.py +++ b/arangoasync/collection.py @@ -6,7 +6,7 @@ ] -from typing import Any, Generic, List, Optional, Sequence, TypeVar, cast +from typing import Any, Generic, List, Literal, Optional, Sequence, TypeVar, cast from arangoasync.cursor import Cursor from arangoasync.errno import ( @@ -26,6 +26,7 @@ DocumentReplaceError, DocumentRevisionError, DocumentUpdateError, + EdgeListError, IndexCreateError, IndexDeleteError, IndexGetError, @@ -111,11 +112,13 @@ def _validate_id(self, doc_id: str) -> str: raise DocumentParseError(f'Bad collection name in document ID 
"{doc_id}"') return doc_id - def _extract_id(self, body: Json) -> str: + def _extract_id(self, body: Json, validate: bool = True) -> str: """Extract the document ID from document body. Args: body (dict): Document body. + validate (bool): Whether to validate the document ID, + checking if it belongs to the current collection. Returns: str: Document ID. @@ -125,7 +128,10 @@ def _extract_id(self, body: Json) -> str: """ try: if "_id" in body: - return self._validate_id(body["_id"]) + if validate: + return self._validate_id(body["_id"]) + else: + return cast(str, body["_id"]) else: key: str = body["_key"] return self._id_prefix + key @@ -150,28 +156,30 @@ def _ensure_key_from_id(self, body: Json) -> Json: body["_key"] = doc_id[len(self._id_prefix) :] return body - def _prep_from_doc(self, document: str | Json) -> str: + def _get_doc_id(self, document: str | Json, validate: bool = True) -> str: """Prepare document ID before a query. Args: document (str | dict): Document ID, key or body. + validate (bool): Whether to validate the document ID, + checking if it belongs to the current collection. Returns: Document ID and request headers. Raises: DocumentParseError: On missing ID and key. - TypeError: On bad document type. 
""" - if isinstance(document, dict): - doc_id = self._extract_id(document) - elif isinstance(document, str): + if isinstance(document, str): if "/" in document: - doc_id = self._validate_id(document) + if validate: + doc_id = self._validate_id(document) + else: + doc_id = document else: doc_id = self._id_prefix + document else: - raise TypeError("Document must be str or a dict") + doc_id = self._extract_id(document, validate) return doc_id @@ -585,7 +593,7 @@ async def has( References: - `get-a-document-header `__ """ # noqa: E501 - handle = self._prep_from_doc(document) + handle = self._get_doc_id(document) headers: RequestHeaders = {} if allow_dirty_read: @@ -1314,7 +1322,7 @@ async def get( References: - `get-a-document `__ """ # noqa: E501 - handle = self._prep_from_doc(document) + handle = self._get_doc_id(document) headers: RequestHeaders = {} if allow_dirty_read: @@ -1814,7 +1822,7 @@ async def get( References: - `get-a-vertex `__ """ # noqa: E501 - handle = self._prep_from_doc(vertex) + handle = self._get_doc_id(vertex) headers: RequestHeaders = {} if if_match is not None: @@ -1958,7 +1966,7 @@ async def update( request = Request( method=Method.PATCH, endpoint=f"/_api/gharial/{self._graph}/vertex/" - f"{self._prep_from_doc(cast(Json, vertex))}", + f"{self._get_doc_id(cast(Json, vertex))}", params=params, headers=headers, data=self._doc_serializer.dumps(vertex), @@ -2033,7 +2041,7 @@ async def replace( request = Request( method=Method.PUT, endpoint=f"/_api/gharial/{self._graph}/vertex/" - f"{self._prep_from_doc(cast(Json, vertex))}", + f"{self._get_doc_id(cast(Json, vertex))}", params=params, headers=headers, data=self._doc_serializer.dumps(vertex), @@ -2101,7 +2109,7 @@ async def delete( request = Request( method=Method.DELETE, endpoint=f"/_api/gharial/{self._graph}/vertex/" - f"{self._prep_from_doc(cast(Json, vertex))}", + f"{self._get_doc_id(cast(Json, vertex))}", params=params, headers=headers, ) @@ -2213,7 +2221,7 @@ async def get( References: - 
`get-an-edge `__ """ # noqa: E501 - handle = self._prep_from_doc(edge) + handle = self._get_doc_id(edge) headers: RequestHeaders = {} if if_match is not None: @@ -2362,7 +2370,7 @@ async def update( request = Request( method=Method.PATCH, endpoint=f"/_api/gharial/{self._graph}/edge/" - f"{self._prep_from_doc(cast(Json, edge))}", + f"{self._get_doc_id(cast(Json, edge))}", params=params, headers=headers, data=self._doc_serializer.dumps(edge), @@ -2441,7 +2449,7 @@ async def replace( request = Request( method=Method.PUT, endpoint=f"/_api/gharial/{self._graph}/edge/" - f"{self._prep_from_doc(cast(Json, edge))}", + f"{self._get_doc_id(cast(Json, edge))}", params=params, headers=headers, data=self._doc_serializer.dumps(edge), @@ -2512,7 +2520,7 @@ async def delete( request = Request( method=Method.DELETE, endpoint=f"/_api/gharial/{self._graph}/edge/" - f"{self._prep_from_doc(cast(Json, edge))}", + f"{self._get_doc_id(cast(Json, edge))}", params=params, headers=headers, ) @@ -2536,3 +2544,93 @@ def response_handler(resp: Response) -> bool | Json: raise DocumentDeleteError(resp, request, msg) return await self._executor.execute(request, response_handler) + + async def edges( + self, + vertex: str | Json, + direction: Optional[Literal["in", "out"]] = None, + allow_dirty_read: Optional[bool] = None, + ) -> Result[Json]: + """Return the edges starting or ending at the specified vertex. + + Args: + vertex (str | dict): Document ID, key or body. + direction (str | None): Direction of the edges to return. Selects `in` + or `out` direction for edges. If not set, any edges are returned. + allow_dirty_read (bool | None): Allow reads from followers in a cluster. + + Returns: + dict: List of edges and statistics. + + Raises: + EdgeListError: If retrieval fails. 
+ + References: + - `get-inbound-and-outbound-edges `__ + """ # noqa: E501 + params: Params = { + "vertex": self._get_doc_id(vertex, validate=False), + } + if direction is not None: + params["direction"] = direction + + headers: RequestHeaders = {} + if allow_dirty_read is not None: + headers["x-arango-allow-dirty-read"] = "true" if allow_dirty_read else False + + request = Request( + method=Method.GET, + endpoint=f"/_api/edges/{self._name}", + params=params, + headers=headers, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise EdgeListError(resp, request) + body = self.deserializer.loads(resp.raw_body) + for key in ("error", "code"): + body.pop(key) + return body + + return await self._executor.execute(request, response_handler) + + async def link( + self, + from_vertex: str | Json, + to_vertex: str | Json, + data: Optional[Json] = None, + wait_for_sync: Optional[bool] = None, + return_new: bool = False, + ) -> Result[Json]: + """Insert a new edge document linking the given vertices. + + Args: + from_vertex (str | dict): "_from" vertex document ID or body with "_id" + field. + to_vertex (str | dict): "_to" vertex document ID or body with "_id" field. + data (dict | None): Any extra data for the new edge document. If it has + "_key" or "_id" field, its value is used as key of the new edge document + (otherwise it is auto-generated). + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_new: Optional[bool]: Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
+ """ + edge: Json = { + "_from": self._get_doc_id(from_vertex, validate=False), + "_to": self._get_doc_id(to_vertex, validate=False), + } + if data is not None: + edge.update(self._ensure_key_from_id(data)) + return await self.insert( + cast(T, edge), wait_for_sync=wait_for_sync, return_new=return_new + ) diff --git a/arangoasync/graph.py b/arangoasync/graph.py index 30fde75..a7bb427 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -1,7 +1,7 @@ __all__ = ["Graph"] -from typing import Generic, List, Optional, Sequence, TypeVar, cast +from typing import Generic, List, Literal, Optional, Sequence, TypeVar, cast from arangoasync.collection import Collection, EdgeCollection, VertexCollection from arangoasync.exceptions import ( @@ -966,3 +966,74 @@ async def delete_edge( return_old=return_old, if_match=if_match, ) + + async def edges( + self, + collection: str, + vertex: str | Json, + direction: Optional[Literal["in", "out"]] = None, + allow_dirty_read: Optional[bool] = None, + ) -> Result[Json]: + """Return the edges starting or ending at the specified vertex. + + Args: + collection (str): Name of the edge collection to return edges from. + vertex (str | dict): Document ID, key or body. + direction (str | None): Direction of the edges to return. Selects `in` + or `out` direction for edges. If not set, any edges are returned. + allow_dirty_read (bool | None): Allow reads from followers in a cluster. + + Returns: + dict: List of edges and statistics. + + Raises: + EdgeListError: If retrieval fails. 
+ + References: + - `get-inbound-and-outbound-edges `__ + """ # noqa: E501 + return await self.edge_collection(collection).edges( + vertex, + direction=direction, + allow_dirty_read=allow_dirty_read, + ) + + async def link( + self, + collection: str, + from_vertex: str | Json, + to_vertex: str | Json, + data: Optional[Json] = None, + wait_for_sync: Optional[bool] = None, + return_new: bool = False, + ) -> Result[Json]: + """Insert a new edge document linking the given vertices. + + Args: + collection (str): Name of the collection to insert the edge into. + from_vertex (str | dict): "_from" vertex document ID or body with "_id" + field. + to_vertex (str | dict): "_to" vertex document ID or body with "_id" field. + data (dict | None): Any extra data for the new edge document. If it has + "_key" or "_id" field, its value is used as key of the new edge document + (otherwise it is auto-generated). + wait_for_sync (bool | None): Wait until operation has been synced to disk. + return_new: Optional[bool]: Additionally return the complete new document + under the attribute `new` in the result. + + Returns: + dict: Document metadata (e.g. document id, key, revision). + If `return_new` is specified, the result contains the document + metadata in the "edge" field and the new document in the "new" field. + + Raises: + DocumentInsertError: If insertion fails. + DocumentParseError: If the document is malformed. 
+ """ + return await self.edge_collection(collection).link( + from_vertex, + to_vertex, + data=data, + wait_for_sync=wait_for_sync, + return_new=return_new, + ) diff --git a/tests/test_graph.py b/tests/test_graph.py index 65d61d0..d51049d 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -6,6 +6,7 @@ EdgeDefinitionDeleteError, EdgeDefinitionListError, EdgeDefinitionReplaceError, + EdgeListError, GraphCreateError, GraphDeleteError, GraphListError, @@ -196,6 +197,8 @@ async def test_edge_collections(db, bad_graph): await bad_graph.replace_edge_definition("foo", ["bar1"], ["bar2"]) with pytest.raises(EdgeDefinitionDeleteError): await bad_graph.delete_edge_definition("foo") + with pytest.raises(EdgeListError): + await bad_graph.edges("col", "foo") # Create full graph name = generate_graph_name() @@ -323,3 +326,79 @@ async def test_edge_collections(db, bad_graph): # Delete the edge definition await graph.delete_edge_definition(edge_col_name) assert await graph.has_edge_definition(edge_col_name) is False + + +@pytest.mark.asyncio +async def test_edge_links(db): + # Create full graph + name = generate_graph_name() + graph = await db.create_graph(name) + + # Teachers collection + teachers_col_name = generate_col_name() + await db.create_collection(teachers_col_name) + await graph.create_vertex_collection(teachers_col_name) + + # Students collection + students_col_name = generate_col_name() + await db.create_collection(students_col_name) + await graph.create_vertex_collection(students_col_name) + + # Edges + teachers_to_students = generate_col_name() + await graph.create_edge_definition( + teachers_to_students, + from_vertex_collections=[teachers_col_name], + to_vertex_collections=[students_col_name], + ) + students_to_students = generate_col_name() + await graph.create_edge_definition( + students_to_students, + from_vertex_collections=[teachers_col_name], + to_vertex_collections=[students_col_name], + ) + + # Populate the graph + teachers = [ + {"_key": "101", 
"name": "Mr. Smith"}, + {"_key": "102", "name": "Ms. Johnson"}, + {"_key": "103", "name": "Dr. Brown"}, + ] + students = [ + {"_key": "123", "name": "Alice"}, + {"_key": "456", "name": "Bob"}, + {"_key": "789", "name": "Charlie"}, + ] + + docs = [] + t = await graph.insert_vertex(teachers_col_name, teachers[0]) + s = await graph.insert_vertex(students_col_name, students[0]) + await graph.link(teachers_to_students, t, s, {"subject": "Math"}) + docs.append(s) + + t = await graph.insert_vertex(teachers_col_name, teachers[1]) + s = await graph.insert_vertex(students_col_name, students[1]) + await graph.link(teachers_to_students, t["_id"], s["_id"], {"subject": "Science"}) + docs.append(s) + + t = await graph.insert_vertex(teachers_col_name, teachers[2]) + s = await graph.insert_vertex(students_col_name, students[2]) + await graph.link(teachers_to_students, t, s, {"subject": "History"}) + docs.append(s) + + await graph.link(students_to_students, docs[0], docs[1], {"friendship": "close"}) + await graph.link(students_to_students, docs[1], docs[0], {"friendship": "close"}) + + edges = await graph.edges(students_to_students, docs[0]) + assert len(edges["edges"]) == 2 + assert "stats" in edges + + await graph.link(students_to_students, docs[2], docs[0], {"friendship": "close"}) + edges = await graph.edges(students_to_students, docs[0], direction="in") + assert len(edges["edges"]) == 2 + + edges = await graph.edges(students_to_students, docs[0], direction="out") + assert len(edges["edges"]) == 1 + + edges = await graph.edges(students_to_students, docs[0]) + assert len(edges["edges"]) == 3 From 3e8530c1def8e4d54817510c2e826b797a0db51c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 05:36:55 +0000 Subject: [PATCH 22/25] Fixing lint --- arangoasync/collection.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/arangoasync/collection.py b/arangoasync/collection.py index 179a4a5..c742714 100644 --- a/arangoasync/collection.py +++ 
b/arangoasync/collection.py @@ -2576,7 +2576,9 @@ async def edges( headers: RequestHeaders = {} if allow_dirty_read is not None: - headers["x-arango-allow-dirty-read"] = "true" if allow_dirty_read else False + headers["x-arango-allow-dirty-read"] = ( + "true" if allow_dirty_read else "false" + ) request = Request( method=Method.GET, From 2cb36a33d36cd77b59eb677c0fceae8d68b5ce83 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 05:48:35 +0000 Subject: [PATCH 23/25] Added github gist example --- docs/serialization.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/serialization.rst b/docs/serialization.rst index b6a51df..ed00702 100644 --- a/docs/serialization.rst +++ b/docs/serialization.rst @@ -183,5 +183,7 @@ You would then use the custom serializer/deserializer when working with collecti students = await col.get_many(keys) assert type(students) == pd.DataFrame +See a full example in this `gist `__. + .. _Pydantic: https://docs.pydantic.dev/latest/ .. _pandas: https://pandas.pydata.org/ From dcebf8e9b268562a9c557f7995b7e8b221844861 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 08:42:13 +0000 Subject: [PATCH 24/25] Adding graph docs --- arangoasync/graph.py | 21 ++- docs/collection.rst | 6 +- docs/document.rst | 20 +++ docs/graph.rst | 415 +++++++++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + docs/specs.rst | 3 + tests/test_graph.py | 2 +- 7 files changed, 460 insertions(+), 8 deletions(-) create mode 100644 docs/graph.rst diff --git a/arangoasync/graph.py b/arangoasync/graph.py index a7bb427..059a53e 100644 --- a/arangoasync/graph.py +++ b/arangoasync/graph.py @@ -64,6 +64,15 @@ def name(self) -> str: """Name of the graph.""" return self._name + @property + def db_name(self) -> str: + """Return the name of the current database. + + Returns: + str: Database name. 
+ """ + return self._executor.db_name + @property def serializer(self) -> Serializer[Json]: """Return the serializer.""" @@ -686,16 +695,16 @@ def response_handler(resp: Response) -> EdgeCollection[T, U, V]: async def delete_edge_definition( self, name: str, - purge: bool = False, + drop_collections: Optional[bool] = None, wait_for_sync: Optional[bool] = None, ) -> None: """Delete an edge definition from the graph. Args: name (str): Edge collection name. - purge (bool): If set to `True`, the edge definition is not just removed - from the graph but the edge collection is also deleted completely - from the database. + drop_collections (bool | None): If set to `True`, the edge definition is not + just removed from the graph but the edge collection is also deleted + completely from the database. wait_for_sync (bool | None): If set to `True`, the operation waits for changes to be synced to disk before returning. @@ -705,7 +714,9 @@ async def delete_edge_definition( References: - `remove-an-edge-definition `__ """ # noqa: E501 - params: Params = {"dropCollections": purge} + params: Params = {} + if drop_collections is not None: + params["dropCollections"] = drop_collections if wait_for_sync is not None: params["waitForSync"] = wait_for_sync diff --git a/docs/collection.rst b/docs/collection.rst index e6a846f..8dd3928 100644 --- a/docs/collection.rst +++ b/docs/collection.rst @@ -6,8 +6,10 @@ by its name which must consist only of hyphen, underscore and alphanumeric characters. There are three types of collections in python-arango: * **Standard Collection:** contains regular documents. -* **Vertex Collection:** contains vertex documents for graphs (not supported yet). -* **Edge Collection:** contains edge documents for graphs (not supported yet). +* **Vertex Collection:** contains vertex documents for graphs. See + :ref:`here ` for more details. +* **Edge Collection:** contains edge documents for graphs. See + :ref:`here ` for more details. 
Here is an example showing how you can manage standard collections:

diff --git a/docs/document.rst b/docs/document.rst
index ff9121e..571507e 100644
--- a/docs/document.rst
+++ b/docs/document.rst
@@ -42,6 +42,26 @@ collection:
         "friends": ["robin", "gordon"]
     }
 
+.. _edge-documents:
+
+**Edge documents (edges)** are similar to standard documents but with two
+additional required fields ``_from`` and ``_to``. Values of these fields must
+be the handles of "from" and "to" vertex documents linked by the edge document
+in question (see :doc:`graph` for details). Edge documents are contained in
+:ref:`edge collections <edge-collections>`. Here is an example of a valid edge
+document in "friends" edge collection:
+
+.. code-block:: python
+
+    {
+        "_id": "friends/001",
+        "_key": "001",
+        "_rev": "_Wm3d4le--_",
+        "_from": "students/john",
+        "_to": "students/jane",
+        "closeness": 9.5
+    }
+
 Standard documents are managed via collection API wrapper:
 
 .. code-block:: python
diff --git a/docs/graph.rst b/docs/graph.rst
new file mode 100644
index 0000000..0f0bbbf
--- /dev/null
+++ b/docs/graph.rst
@@ -0,0 +1,415 @@
+Graphs
+------
+
+A **graph** consists of vertices and edges. Vertices are stored as documents in
+:ref:`vertex collections <vertex-collections>` and edges stored as documents in
+:ref:`edge collections <edge-collections>`. The collections used in a graph and
+their relations are specified with :ref:`edge definitions <edge-definitions>`.
+For more information, refer to `ArangoDB Manual`_.
+
+.. _ArangoDB Manual: https://docs.arangodb.com
+
+**Example:**
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # List existing graphs in the database.
+        await db.graphs()
+
+        # Create a new graph named "school" if it does not already exist.
+ # This returns an API wrapper for "school" graph. + if await db.has_graph("school"): + school = db.graph("school") + else: + school = await db.create_graph("school") + + # Retrieve various graph properties. + graph_name = school.name + db_name = school.db_name + vcols = await school.vertex_collections() + ecols = await school.edge_definitions() + + # Delete the graph. + await db.delete_graph("school") + +.. _edge-definitions: + +Edge Definitions +================ + +An **edge definition** specifies a directed relation in a graph. A graph can +have arbitrary number of edge definitions. Each edge definition consists of the +following components: + +* **From Vertex Collections:** contain "_from" vertices referencing "_to" vertices. +* **To Vertex Collections:** contain "_to" vertices referenced by "_from" vertices. +* **Edge Collection:** contains edges that link "_from" and "_to" vertices. + +Here is an example body of an edge definition: + +.. code-block:: python + + { + "edge_collection": "teach", + "from_vertex_collections": ["teachers"], + "to_vertex_collections": ["lectures"] + } + +Here is an example showing how edge definitions are managed: + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + if await db.has_graph("school"): + school = db.graph("school") + else: + school = await db.create_graph("school") + + # Create an edge definition named "teach". This creates any missing + # collections and returns an API wrapper for "teach" edge collection. + # At first, create a wrong teachers->teachers mapping intentionally. 
+ if not await school.has_edge_definition("teach"): + await school.create_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["teachers"] + ) + + # List edge definitions. + edge_defs = await school.edge_definitions() + + # Replace with the correct edge definition. + await school.replace_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["lectures"] + ) + + # Delete the edge definition (and its collections). + await school.delete_edge_definition("teach", drop_collections=True) + +.. _vertex-collections: + +Vertex Collections +================== + +A **vertex collection** contains vertex documents, and shares its namespace +with all other types of collections. Each graph can have an arbitrary number of +vertex collections. Vertex collections that are not part of any edge definition +are called **orphan collections**. You can manage vertex documents via standard +collection API wrappers, but using vertex collection API wrappers provides +additional safeguards: + +* All modifications are executed in transactions. +* If a vertex is deleted, all connected edges are also automatically deleted. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + school = db.graph("school") + + # Create a new vertex collection named "teachers" if it does not exist. + # This returns an API wrapper for "teachers" vertex collection. 
+ if await school.has_vertex_collection("teachers"): + teachers = school.vertex_collection("teachers") + else: + teachers = await school.create_vertex_collection("teachers") + + # List vertex collections in the graph. + cols = await school.vertex_collections() + + # Vertex collections have similar interface as standard collections. + props = await teachers.properties() + await teachers.insert({"_key": "jon", "name": "Jon"}) + await teachers.update({"_key": "jon", "age": 35}) + await teachers.replace({"_key": "jon", "name": "Jon", "age": 36}) + await teachers.get("jon") + await teachers.has("jon") + await teachers.delete("jon") + +You can manage vertices via graph API wrappers also, but you must use document +IDs instead of keys where applicable. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + school = db.graph("school") + + # Create a new vertex collection named "lectures" if it does not exist. + # This returns an API wrapper for "lectures" vertex collection. + if await school.has_vertex_collection("lectures"): + school.vertex_collection("lectures") + else: + await school.create_vertex_collection("lectures") + + # The "_id" field is required instead of "_key" field (except for insert). 
+ await school.insert_vertex("lectures", {"_key": "CSC101"}) + await school.update_vertex({"_id": "lectures/CSC101", "difficulty": "easy"}) + await school.replace_vertex({"_id": "lectures/CSC101", "difficulty": "hard"}) + await school.has_vertex("lectures/CSC101") + await school.vertex("lectures/CSC101") + await school.delete_vertex("lectures/CSC101") + +See :class:`arangoasync.graph.Graph` and :class:`arangoasync.collection.VertexCollection` for API specification. + +.. _edge-collections: + +Edge Collections +================ + +An **edge collection** contains :ref:`edge documents `, and +shares its namespace with all other types of collections. You can manage edge +documents via standard collection API wrappers, but using edge collection API +wrappers provides additional safeguards: + +* All modifications are executed in transactions. +* Edge documents are checked against the edge definitions on insert. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + if await db.has_graph("school"): + school = db.graph("school") + else: + school = await db.create_graph("school") + + if not await school.has_vertex_collection("lectures"): + await school.create_vertex_collection("lectures") + await school.insert_vertex("lectures", {"_key": "CSC101"}) + + if not await school.has_vertex_collection("teachers"): + await school.create_vertex_collection("teachers") + await school.insert_vertex("teachers", {"_key": "jon"}) + + # Get the API wrapper for edge collection "teach". 
+ if await school.has_edge_definition("teach"): + teach = school.edge_collection("teach") + else: + teach = await school.create_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["lectures"] + ) + + # Edge collections have a similar interface as standard collections. + await teach.insert({ + "_key": "jon-CSC101", + "_from": "teachers/jon", + "_to": "lectures/CSC101" + }) + await teach.replace({ + "_key": "jon-CSC101", + "_from": "teachers/jon", + "_to": "lectures/CSC101", + "online": False + }) + await teach.update({ + "_key": "jon-CSC101", + "online": True + }) + await teach.has("jon-CSC101") + await teach.get("jon-CSC101") + await teach.delete("jon-CSC101") + + # Create an edge between two vertices (essentially the same as insert). + await teach.link("teachers/jon", "lectures/CSC101", data={"online": False}) + + # List edges going in/out of a vertex. + inbound = await teach.edges("teachers/jon", direction="in") + outbound = await teach.edges("teachers/jon", direction="out") + +You can manage edges via graph API wrappers also, but you must use document +IDs instead of keys where applicable. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". 
+ if await db.has_graph("school"): + school = db.graph("school") + else: + school = await db.create_graph("school") + + if not await school.has_vertex_collection("lectures"): + await school.create_vertex_collection("lectures") + await school.insert_vertex("lectures", {"_key": "CSC101"}) + + if not await school.has_vertex_collection("teachers"): + await school.create_vertex_collection("teachers") + await school.insert_vertex("teachers", {"_key": "jon"}) + + # Create the edge collection "teach". + if not await school.has_edge_definition("teach"): + await school.create_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["lectures"] + ) + + # The "_id" field is required instead of "_key" field. + await school.insert_edge( + collection="teach", + edge={ + "_id": "teach/jon-CSC101", + "_from": "teachers/jon", + "_to": "lectures/CSC101" + } + ) + await school.replace_edge({ + "_id": "teach/jon-CSC101", + "_from": "teachers/jon", + "_to": "lectures/CSC101", + "online": False, + }) + await school.update_edge({ + "_id": "teach/jon-CSC101", + "online": True + }) + await school.has_edge("teach/jon-CSC101") + await school.edge("teach/jon-CSC101") + await school.delete_edge("teach/jon-CSC101") + await school.link("teach", "teachers/jon", "lectures/CSC101") + await school.edges("teach", "teachers/jon", direction="out") + +See :class:`arangoasync.graph.Graph` and :class:`arangoasync.graph.EdgeCollection` for API specification. + +.. _graph-traversals: + +Graph Traversals +================ + +**Graph traversals** are executed via AQL. +Each traversal can span across multiple vertex collections, and walk +over edges and vertices using various algorithms. + +**Example:** + +.. code-block:: python + + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. 
+ async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + if await db.has_graph("school"): + school = db.graph("school") + else: + school = await db.create_graph("school") + + # Create vertex collections "lectures" and "teachers" if they do not exist. + if not await school.has_vertex_collection("lectures"): + await school.create_vertex_collection("lectures") + if not await school.has_vertex_collection("teachers"): + await school.create_vertex_collection("teachers") + + # Create the edge collection "teach". + if not await school.has_edge_definition("teach"): + await school.create_edge_definition( + edge_collection="teach", + from_vertex_collections=["teachers"], + to_vertex_collections=["lectures"] + ) + + # Get API wrappers for "from" and "to" vertex collections. + teachers = school.vertex_collection("teachers") + lectures = school.vertex_collection("lectures") + + # Get the API wrapper for the edge collection. + teach = school.edge_collection("teach") + + # Insert vertices into the graph. + await teachers.insert({"_key": "jon", "name": "Professor jon"}) + await lectures.insert({"_key": "CSC101", "name": "Introduction to CS"}) + await lectures.insert({"_key": "MAT223", "name": "Linear Algebra"}) + await lectures.insert({"_key": "STA201", "name": "Statistics"}) + + # Insert edges into the graph. + await teach.insert({"_from": "teachers/jon", "_to": "lectures/CSC101"}) + await teach.insert({"_from": "teachers/jon", "_to": "lectures/STA201"}) + await teach.insert({"_from": "teachers/jon", "_to": "lectures/MAT223"}) + + # AQL to perform a graph traversal. 
+ # Traverse 1 to 3 hops from the vertex "teachers/jon", + query = """ + FOR v, e, p IN 1..3 OUTBOUND 'teachers/jon' GRAPH 'school' + OPTIONS { bfs: true, uniqueVertices: 'global' } + RETURN {vertex: v, edge: e, path: p} + """ + + # Traverse the graph in outbound direction, breath-first. + async with await db.aql.execute(query) as cursor: + async for lecture in cursor: + print(lecture) diff --git a/docs/index.rst b/docs/index.rst index 3252629..180c0ed 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -35,6 +35,7 @@ Contents collection indexes document + graph aql **Specialized Features** diff --git a/docs/specs.rst b/docs/specs.rst index dc92bd9..9983716 100644 --- a/docs/specs.rst +++ b/docs/specs.rst @@ -19,6 +19,9 @@ python-arango-async. .. automodule:: arangoasync.aql :members: +.. automodule:: arangoasync.graph + :members: + .. automodule:: arangoasync.job :members: diff --git a/tests/test_graph.py b/tests/test_graph.py index d51049d..6d5fcbe 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -354,7 +354,7 @@ async def test_edge_links(db): students_to_students = generate_col_name() await graph.create_edge_definition( students_to_students, - from_vertex_collections=[teachers_col_name], + from_vertex_collections=[teachers_col_name, students_col_name], to_vertex_collections=[students_col_name], ) From 081eb7f550711a87c2ee2554ff90b3f1eb8c57fd Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 09:20:34 +0000 Subject: [PATCH 25/25] Adding graphs example in the readme --- README.md | 60 ++++++++++++++++++++++++++++++++++++++++++++ docs/overview.rst | 64 ++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 123 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 4f6cd2b..507c3e9 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,67 @@ async def main(): student_names = [] async for doc in cursor: student_names.append(doc["name"]) +``` + +Another example with [graphs](https://docs.arangodb.com/stable/graphs/): 
+```python +async def main(): + from arangoasync import ArangoClient + from arangoasync.auth import Auth + + # Initialize the client for ArangoDB. + async with ArangoClient(hosts="http://localhost:8529") as client: + auth = Auth(username="root", password="passwd") + + # Connect to "test" database as root user. + db = await client.db("test", auth=auth) + + # Get the API wrapper for graph "school". + if await db.has_graph("school"): + graph = db.graph("school") + else: + graph = await db.create_graph("school") + + # Create vertex collections for the graph. + students = await graph.create_vertex_collection("students") + lectures = await graph.create_vertex_collection("lectures") + + # Create an edge definition (relation) for the graph. + edges = await graph.create_edge_definition( + edge_collection="register", + from_vertex_collections=["students"], + to_vertex_collections=["lectures"] + ) + + # Insert vertex documents into "students" (from) vertex collection. + await students.insert({"_key": "01", "full_name": "Anna Smith"}) + await students.insert({"_key": "02", "full_name": "Jake Clark"}) + await students.insert({"_key": "03", "full_name": "Lisa Jones"}) + + # Insert vertex documents into "lectures" (to) vertex collection. + await lectures.insert({"_key": "MAT101", "title": "Calculus"}) + await lectures.insert({"_key": "STA101", "title": "Statistics"}) + await lectures.insert({"_key": "CSC101", "title": "Algorithms"}) + + # Insert edge documents into "register" edge collection. 
+    await edges.insert({"_from": "students/01", "_to": "lectures/MAT101"})
+    await edges.insert({"_from": "students/01", "_to": "lectures/STA101"})
+    await edges.insert({"_from": "students/01", "_to": "lectures/CSC101"})
+    await edges.insert({"_from": "students/02", "_to": "lectures/MAT101"})
+    await edges.insert({"_from": "students/02", "_to": "lectures/STA101"})
+    await edges.insert({"_from": "students/03", "_to": "lectures/CSC101"})
+
+    # Traverse the graph in outbound direction, breadth-first.
+    query = """
+        FOR v, e, p IN 1..3 OUTBOUND 'students/01' GRAPH 'school'
+        OPTIONS { bfs: true, uniqueVertices: 'global' }
+        RETURN {vertex: v, edge: e, path: p}
+    """
+
+    async with await db.aql.execute(query) as cursor:
+        async for doc in cursor:
+            print(doc)
 ```
 
 Please see the [documentation](https://python-arango-async.readthedocs.io/en/latest/) for more details.
diff --git a/docs/overview.rst b/docs/overview.rst
index 6f1f76a..f723234 100644
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -39,7 +39,7 @@ Here is an example showing how **python-arango-async** client can be used:
         async for doc in cursor:
             student_names.append(doc["name"])
 
-You may also use the client without a context manager, but you must ensure to close the client when done:
+You may also use the client without a context manager, but you must ensure to close the client when done.
 
 .. code-block:: python
 
@@ -61,3 +61,65 @@ You may also use the client without a context manager, but you must ensure to cl
 
         # Close the client when done.
         await client.close()
+
+Another example with `graphs`_:
+
+.. _graphs: https://docs.arangodb.com/stable/graphs/
+
+.. code-block:: python
+
+    from arangoasync import ArangoClient
+    from arangoasync.auth import Auth
+
+    # Initialize the client for ArangoDB.
+    async with ArangoClient(hosts="http://localhost:8529") as client:
+        auth = Auth(username="root", password="passwd")
+
+        # Connect to "test" database as root user.
+        db = await client.db("test", auth=auth)
+
+        # Get the API wrapper for graph "school".
+        if await db.has_graph("school"):
+            graph = db.graph("school")
+        else:
+            graph = await db.create_graph("school")
+
+        # Create vertex collections for the graph.
+        students = await graph.create_vertex_collection("students")
+        lectures = await graph.create_vertex_collection("lectures")
+
+        # Create an edge definition (relation) for the graph.
+        edges = await graph.create_edge_definition(
+            edge_collection="register",
+            from_vertex_collections=["students"],
+            to_vertex_collections=["lectures"]
+        )
+
+        # Insert vertex documents into "students" (from) vertex collection.
+        await students.insert({"_key": "01", "full_name": "Anna Smith"})
+        await students.insert({"_key": "02", "full_name": "Jake Clark"})
+        await students.insert({"_key": "03", "full_name": "Lisa Jones"})
+
+        # Insert vertex documents into "lectures" (to) vertex collection.
+        await lectures.insert({"_key": "MAT101", "title": "Calculus"})
+        await lectures.insert({"_key": "STA101", "title": "Statistics"})
+        await lectures.insert({"_key": "CSC101", "title": "Algorithms"})
+
+        # Insert edge documents into "register" edge collection.
+        await edges.insert({"_from": "students/01", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/01", "_to": "lectures/CSC101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/MAT101"})
+        await edges.insert({"_from": "students/02", "_to": "lectures/STA101"})
+        await edges.insert({"_from": "students/03", "_to": "lectures/CSC101"})
+
+        # Traverse the graph in outbound direction, breadth-first.
+        query = """
+            FOR v, e, p IN 1..3 OUTBOUND 'students/01' GRAPH 'school'
+            OPTIONS { bfs: true, uniqueVertices: 'global' }
+            RETURN {vertex: v, edge: e, path: p}
+        """
+
+        async with await db.aql.execute(query) as cursor:
+            async for doc in cursor:
+                print(doc)
